Enabling all TestFlows modules and fixing some tests.

This commit is contained in:
parent c72a1be0be
commit 999ce1c867
@@ -1035,10 +1035,12 @@ a
 
 mapAdd_with_Int128_on_a_table = r"""
 a
+([1,2],[2,4])
 """
 
 mapSubtract_with_Int128_on_a_table = r"""
 a
+([1,2],[0,0])
 """
 
 mapPopulateSeries_with_Int128_on_a_table = r"""

@@ -1563,10 +1565,12 @@ a
 
 mapAdd_with_Int256_on_a_table = r"""
 a
+([1,2],[2,4])
 """
 
 mapSubtract_with_Int256_on_a_table = r"""
 a
+([1,2],[0,0])
 """
 
 mapPopulateSeries_with_Int256_on_a_table = r"""

@@ -2091,10 +2095,12 @@ a
 
 mapAdd_with_UInt128_on_a_table = r"""
 a
+([1,2],[2,4])
 """
 
 mapSubtract_with_UInt128_on_a_table = r"""
 a
+([1,2],[0,0])
 """
 
 mapPopulateSeries_with_UInt128_on_a_table = r"""

@@ -2619,10 +2625,12 @@ a
 
 mapAdd_with_UInt256_on_a_table = r"""
 a
+([1,2],[2,4])
 """
 
 mapSubtract_with_UInt256_on_a_table = r"""
 a
+([1,2],[0,0])
 """
 
 mapPopulateSeries_with_UInt256_on_a_table = r"""
@@ -6280,3 +6288,43 @@ a
 \N
 """
 
+mapAdd_with_Int128 = r"""
+mapAdd(tuple(array(toInt128(\'1\'), toInt128(\'2\')), array(toInt128(\'1\'), toInt128(\'2\'))), tuple(array(toInt128(\'1\'), toInt128(\'2\')), array(toInt128(\'1\'), toInt128(\'2\'))))
+([1,2],[2,4])
+"""
+
+mapSubtract_with_Int128 = r"""
+mapSubtract(tuple(array(toInt128(\'1\'), toInt128(\'2\')), array(toInt128(\'1\'), toInt128(\'2\'))), tuple(array(toInt128(\'1\'), toInt128(\'2\')), array(toInt128(\'1\'), toInt128(\'2\'))))
+([1,2],[0,0])
+"""
+
+mapAdd_with_Int256 = r"""
+mapAdd(tuple(array(toInt256(\'1\'), toInt256(\'2\')), array(toInt256(\'1\'), toInt256(\'2\'))), tuple(array(toInt256(\'1\'), toInt256(\'2\')), array(toInt256(\'1\'), toInt256(\'2\'))))
+([1,2],[2,4])
+"""
+
+mapSubtract_with_Int256 = r"""
+mapSubtract(tuple(array(toInt256(\'1\'), toInt256(\'2\')), array(toInt256(\'1\'), toInt256(\'2\'))), tuple(array(toInt256(\'1\'), toInt256(\'2\')), array(toInt256(\'1\'), toInt256(\'2\'))))
+([1,2],[0,0])
+"""
+
+mapAdd_with_UInt128 = r"""
+mapAdd(tuple(array(toUInt128(\'1\'), toUInt128(\'2\')), array(toUInt128(\'1\'), toUInt128(\'2\'))), tuple(array(toUInt128(\'1\'), toUInt128(\'2\')), array(toUInt128(\'1\'), toUInt128(\'2\'))))
+([1,2],[2,4])
+"""
+
+mapSubtract_with_UInt128 = r"""
+mapSubtract(tuple(array(toUInt128(\'1\'), toUInt128(\'2\')), array(toUInt128(\'1\'), toUInt128(\'2\'))), tuple(array(toUInt128(\'1\'), toUInt128(\'2\')), array(toUInt128(\'1\'), toUInt128(\'2\'))))
+([1,2],[0,0])
+"""
+
+mapAdd_with_UInt256 = r"""
+mapAdd(tuple(array(toUInt256(\'1\'), toUInt256(\'2\')), array(toUInt256(\'1\'), toUInt256(\'2\'))), tuple(array(toUInt256(\'1\'), toUInt256(\'2\')), array(toUInt256(\'1\'), toUInt256(\'2\'))))
+([1,2],[2,4])
+"""
+
+mapSubtract_with_UInt256 = r"""
+mapSubtract(tuple(array(toUInt256(\'1\'), toUInt256(\'2\')), array(toUInt256(\'1\'), toUInt256(\'2\'))), tuple(array(toUInt256(\'1\'), toUInt256(\'2\')), array(toUInt256(\'1\'), toUInt256(\'2\'))))
+([1,2],[0,0])
+"""
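
Note: the new snapshot values follow from how mapAdd and mapSubtract combine the (keys, values) form: values are summed or subtracted per matching key, so combining ([1,2],[1,2]) with itself gives ([1,2],[2,4]) for mapAdd and ([1,2],[0,0]) for mapSubtract. A minimal Python sketch of that key-wise combination (map_combine is an illustrative helper, not part of the test suite, and it assumes both maps share the same key array; the real ClickHouse functions also merge differing key sets):

    from operator import add, sub

    def map_combine(keys, vals_a, vals_b, op):
        # Key-wise combination for maps sharing one key array,
        # mirroring what the snapshot values above record.
        return keys, [op(a, b) for a, b in zip(vals_a, vals_b)]

    print(map_combine([1, 2], [1, 2], [1, 2], add))  # ([1, 2], [2, 4])
    print(map_combine([1, 2], [1, 2], [1, 2], sub))  # ([1, 2], [0, 0])
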
@@ -50,18 +50,16 @@ def array_func(self, data_type, node=None):
 table(name = table_name, data_type = f'Array({data_type})')
 
 with When("I insert the output into the table"):
-node.query(f"INSERT INTO {table_name} SELECT {func}array({to_data_type(data_type,3)}, {to_data_type(data_type,2)}, {to_data_type(data_type,1)}))")
+node.query(f"INSERT INTO {table_name} SELECT {func}array({to_data_type(data_type,3)},"
+    f"{to_data_type(data_type,2)}, {to_data_type(data_type,1)}))")
 
-execute_query(f"""
-    SELECT * FROM {table_name} ORDER BY a ASC
-    """)
+execute_query(f"SELECT * FROM {table_name} ORDER BY a ASC")
 
 for func in ['arraySplit((x, y) -> x=y, [0, 0, 0],']:
 
 with Scenario(f"Inline - {data_type} - {func})"):
-execute_query(f"""
-    SELECT {func}array({to_data_type(data_type,3)}, {to_data_type(data_type,2)}, {to_data_type(data_type,1)}))
-    """)
+execute_query(f"SELECT {func}array({to_data_type(data_type,3)}, {to_data_type(data_type,2)},"
+    f"{to_data_type(data_type,1)}))")
 
 with Scenario(f"Table - {data_type} - {func})"):
 table_name = get_table_name()
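
Note: the long query strings in these hunks are split using Python's implicit concatenation of adjacent string literals, so node.query() still receives a single SQL statement; only the whitespace at the split point changes, which the SQL parser ignores. A small self-contained illustration with placeholder values (not taken from the test):

    # Placeholder values purely for illustration.
    table_name = "t1"
    x, y, z = "1", "2", "3"

    # Adjacent f-string literals are joined at compile time into one string.
    split = (f"INSERT INTO {table_name} SELECT empty(array({x},"
             f"{y}, {z}))")
    single = f"INSERT INTO {table_name} SELECT empty(array({x},{y}, {z}))"
    assert split == single
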
@@ -69,18 +67,15 @@ def array_func(self, data_type, node=None):
 table(name = table_name, data_type = f'Array(Array({data_type}))')
 
 with When("I insert the output into the table"):
-node.query(f"INSERT INTO {table_name} SELECT {func}array({to_data_type(data_type,3)}, {to_data_type(data_type,2)}, {to_data_type(data_type,1)}))")
+node.query(f"INSERT INTO {table_name} SELECT {func}array({to_data_type(data_type,3)},"
+    f"{to_data_type(data_type,2)}, {to_data_type(data_type,1)}))")
 
-execute_query(f"""
-    SELECT * FROM {table_name} ORDER BY a ASC
-    """)
+execute_query(f"SELECT * FROM {table_name} ORDER BY a ASC")
 
 for func in [f'arrayZip([{to_data_type(data_type,1)}],']:
 
 with Scenario(f"Inline - {data_type} - {func})"):
-execute_query(f"""
-    SELECT {func}array({to_data_type(data_type,3)}))
-    """)
+execute_query(f"SELECT {func}array({to_data_type(data_type,3)}))")
 
 with Scenario(f"Table - {data_type} - {func})"):
 table_name = get_table_name()

@@ -90,9 +85,7 @@ def array_func(self, data_type, node=None):
 with When("I insert the output into the table"):
 node.query(f"INSERT INTO {table_name} SELECT {func}array({to_data_type(data_type,1)}))")
 
-execute_query(f"""
-    SELECT * FROM {table_name} ORDER BY a ASC
-    """)
+execute_query(f"SELECT * FROM {table_name} ORDER BY a ASC")
 
 for func in ['empty(',
 'notEmpty(',
@@ -125,20 +118,17 @@ def array_func(self, data_type, node=None):
 table(name = table_name, data_type = data_type)
 
 with When("I insert the output into the table"):
-node.query(f"INSERT INTO {table_name} SELECT {func}array({to_data_type(data_type,3)}, {to_data_type(data_type,2)}, {to_data_type(data_type,1)}))",
+node.query(f"INSERT INTO {table_name} SELECT {func}array({to_data_type(data_type,3)},"
+    f"{to_data_type(data_type,2)}, {to_data_type(data_type,1)}))",
 exitcode = 44, message = 'Exception:')
 
-execute_query(f"""
-    SELECT * FROM {table_name} ORDER BY a ASC
-    """)
+execute_query(f"SELECT * FROM {table_name} ORDER BY a ASC")
 
 else:
 
 with Scenario(f"Inline - {data_type} - {func})"):
 
-execute_query(f"""
-    SELECT {func}array({to_data_type(data_type,3)}, {to_data_type(data_type,2)}, {to_data_type(data_type,1)}))
-    """)
+execute_query(f"SELECT {func}array({to_data_type(data_type,3)}, {to_data_type(data_type,2)}, {to_data_type(data_type,1)}))")
 
 with Scenario(f"Table - {data_type} - {func})"):
 table_name = get_table_name()

@@ -146,11 +136,10 @@ def array_func(self, data_type, node=None):
 table(name = table_name, data_type = data_type)
 
 with When("I insert the output into the table"):
-node.query(f"INSERT INTO {table_name} SELECT {func}array({to_data_type(data_type,3)}, {to_data_type(data_type,2)}, {to_data_type(data_type,1)}))")
+node.query(f"INSERT INTO {table_name} SELECT {func}array({to_data_type(data_type,3)},"
+    f"{to_data_type(data_type,2)}, {to_data_type(data_type,1)}))")
 
-execute_query(f"""
-    SELECT * FROM {table_name} ORDER BY a ASC
-    """)
+execute_query(f"SELECT * FROM {table_name} ORDER BY a ASC")
 
 for func in ['arrayDifference(',
 'arrayCumSum(',

@@ -171,12 +160,11 @@ def array_func(self, data_type, node=None):
 table(name = table_name, data_type = data_type)
 
 with When("I insert the output into the table"):
-node.query(f"INSERT INTO {table_name} SELECT {func}array({to_data_type(data_type,3)}, {to_data_type(data_type,2)}, {to_data_type(data_type,1)}))",
+node.query(f"INSERT INTO {table_name} SELECT {func}array({to_data_type(data_type,3)},"
+    f"{to_data_type(data_type,2)}, {to_data_type(data_type,1)}))",
 exitcode = exitcode, message = 'Exception:')
 
-execute_query(f"""
-    SELECT * FROM {table_name} ORDER BY a ASC
-    """)
+execute_query(f"SELECT * FROM {table_name} ORDER BY a ASC")
 
 for func in ['arrayElement']:
 
@@ -192,20 +180,18 @@ def array_func(self, data_type, node=None):
 table(name = table_name, data_type = data_type)
 
 with When("I insert the output into the table"):
-node.query(f"INSERT INTO {table_name} SELECT {func}(array({to_data_type(data_type,3)}, {to_data_type(data_type,2)}, {to_data_type(data_type,1)}), 1)")
+node.query(f"INSERT INTO {table_name} SELECT {func}(array({to_data_type(data_type,3)},"
+    f"{to_data_type(data_type,2)}, {to_data_type(data_type,1)}), 1)")
 
-execute_query(f"""
-    SELECT * FROM {table_name} ORDER BY a ASC
-    """)
+execute_query(f"SELECT * FROM {table_name} ORDER BY a ASC")
 
 for func in ['arrayPushBack',
 'arrayPushFront']:
 
 with Scenario(f"Inline - {data_type} - {func}"):
 
-execute_query(f"""
-    SELECT {func}(array({to_data_type(data_type,3)}, {to_data_type(data_type,2)}, {to_data_type(data_type,1)}), {to_data_type(data_type,1)})
-    """)
+execute_query(f"SELECT {func}(array({to_data_type(data_type,3)}, {to_data_type(data_type,2)},"
+    f"{to_data_type(data_type,1)}), {to_data_type(data_type,1)})")
 
 with Scenario(f"Table - {data_type} - {func}"):
 table_name = get_table_name()

@@ -213,20 +199,18 @@ def array_func(self, data_type, node=None):
 table(name = table_name, data_type = f'Array({data_type})')
 
 with When("I insert the output into the table"):
-node.query(f"INSERT INTO {table_name} SELECT {func}(array({to_data_type(data_type,3)}, {to_data_type(data_type,2)}, {to_data_type(data_type,1)}), {to_data_type(data_type,1)})")
+node.query(f"INSERT INTO {table_name} SELECT {func}(array({to_data_type(data_type,3)},"
+    f"{to_data_type(data_type,2)}, {to_data_type(data_type,1)}), {to_data_type(data_type,1)})")
 
-execute_query(f"""
-    SELECT * FROM {table_name} ORDER BY a ASC
-    """)
+execute_query(f"SELECT * FROM {table_name} ORDER BY a ASC")
 
 for func in ['arrayResize',
 'arraySlice']:
 
 with Scenario(f"Inline - {data_type} - {func}"):
 
-execute_query(f"""
-    SELECT {func}(array({to_data_type(data_type,3)}, {to_data_type(data_type,2)}, {to_data_type(data_type,1)}), 1)
-    """)
+execute_query(f"SELECT {func}(array({to_data_type(data_type,3)},"
+    f"{to_data_type(data_type,2)}, {to_data_type(data_type,1)}), 1)")
 
 with Scenario(f"Table - {data_type} - {func}"):
 table_name = get_table_name()

@@ -234,20 +218,18 @@ def array_func(self, data_type, node=None):
 table(name = table_name, data_type = f'Array({data_type})')
 
 with When("I insert the output into the table"):
-node.query(f"INSERT INTO {table_name} SELECT {func}(array({to_data_type(data_type,3)}, {to_data_type(data_type,2)}, {to_data_type(data_type,1)}), 1)")
+node.query(f"INSERT INTO {table_name} SELECT {func}(array({to_data_type(data_type,3)},"
+    f"{to_data_type(data_type,2)}, {to_data_type(data_type,1)}), 1)")
 
-execute_query(f"""
-    SELECT * FROM {table_name} ORDER BY a ASC
-    """)
+execute_query(f"SELECT * FROM {table_name} ORDER BY a ASC")
 
 for func in ['has',
 'indexOf',
 'countEqual']:
 
 with Scenario(f"Inline - {data_type} - {func}"):
-execute_query(f"""
-    SELECT {func}(array({to_data_type(data_type,3)}, {to_data_type(data_type,2)}, {to_data_type(data_type,1)}), NULL)
-    """)
+execute_query(f"SELECT {func}(array({to_data_type(data_type,3)},"
+    f"{to_data_type(data_type,2)}, {to_data_type(data_type,1)}), NULL)")
 
 with Scenario(f"Table - {data_type} - {func}"):
 table_name = get_table_name()

@@ -255,11 +237,10 @@ def array_func(self, data_type, node=None):
 table(name = table_name, data_type = data_type)
 
 with When("I insert the output into the table"):
-node.query(f"INSERT INTO {table_name} SELECT {func}(array({to_data_type(data_type,3)}, {to_data_type(data_type,2)}, {to_data_type(data_type,1)}), NULL)")
+node.query(f"INSERT INTO {table_name} SELECT {func}(array({to_data_type(data_type,3)},"
+    f"{to_data_type(data_type,2)}, {to_data_type(data_type,1)}), NULL)")
 
-execute_query(f"""
-    SELECT * FROM {table_name} ORDER BY a ASC
-    """)
+execute_query(f"SELECT * FROM {table_name} ORDER BY a ASC")
 
 @TestOutline(Suite)
 @Requirements(
@@ -281,11 +262,10 @@ def tuple_func(self, data_type, node=None):
 table(name = table_name, data_type = f'Tuple({data_type}, {data_type}, {data_type})')
 
 with When("I insert the output into a table"):
-node.query(f"INSERT INTO {table_name} SELECT tuple({to_data_type(data_type,1)}, {to_data_type(data_type,1)}, {to_data_type(data_type,1)})")
+node.query(f"INSERT INTO {table_name} SELECT tuple({to_data_type(data_type,1)},"
+    f"{to_data_type(data_type,1)}, {to_data_type(data_type,1)})")
 
-execute_query(f"""
-    SELECT * FROM {table_name} ORDER BY a ASC
-    """)
+execute_query(f"SELECT * FROM {table_name} ORDER BY a ASC")
 
 with Scenario(f"tupleElement with {data_type}"):
 node.query(f"SELECT tupleElement(({to_data_type(data_type,1)}, {to_data_type(data_type,1)}), 1)")

@@ -298,9 +278,7 @@ def tuple_func(self, data_type, node=None):
 with When("I insert the output into a table"):
 node.query(f"INSERT INTO {table_name} SELECT tupleElement(({to_data_type(data_type,1)}, {to_data_type(data_type,1)}), 1)")
 
-execute_query(f"""
-    SELECT * FROM {table_name} ORDER BY a ASC
-    """)
+execute_query(f"SELECT * FROM {table_name} ORDER BY a ASC")
 
 with Scenario(f"untuple with {data_type}"):
 node.query(f"SELECT untuple(({to_data_type(data_type,1)},))")

@@ -313,12 +291,11 @@ def tuple_func(self, data_type, node=None):
 with When("I insert the output into a table"):
 node.query(f"INSERT INTO {table_name} SELECT untuple(({to_data_type(data_type,1)},))")
 
-execute_query(f"""
-    SELECT * FROM {table_name} ORDER BY a ASC
-    """)
+execute_query(f"SELECT * FROM {table_name} ORDER BY a ASC")
 
 with Scenario(f"tupleHammingDistance with {data_type}"):
-node.query(f"SELECT tupleHammingDistance(({to_data_type(data_type,1)}, {to_data_type(data_type,1)}), ({to_data_type(data_type,2)}, {to_data_type(data_type,2)}))")
+node.query(f"SELECT tupleHammingDistance(({to_data_type(data_type,1)}, {to_data_type(data_type,1)}),"
+    f"({to_data_type(data_type,2)}, {to_data_type(data_type,2)}))")
 
 with Scenario(f"tupleHammingDistance with {data_type} on a table"):
 table_name = get_table_name()

@@ -326,11 +303,10 @@ def tuple_func(self, data_type, node=None):
 table(name = table_name, data_type = data_type)
 
 with When("I insert the output into a table"):
-node.query(f"INSERT INTO {table_name} SELECT tupleHammingDistance(({to_data_type(data_type,1)}, {to_data_type(data_type,1)}), ({to_data_type(data_type,2)}, {to_data_type(data_type,2)}))")
+node.query(f"INSERT INTO {table_name} SELECT tupleHammingDistance(({to_data_type(data_type,1)},"
+    f"{to_data_type(data_type,1)}), ({to_data_type(data_type,2)}, {to_data_type(data_type,2)}))")
 
-execute_query(f"""
-    SELECT * FROM {table_name} ORDER BY a ASC
-    """)
+execute_query(f"SELECT * FROM {table_name} ORDER BY a ASC")
 
 @TestOutline(Suite)
 @Requirements(
@@ -355,13 +331,17 @@ def map_func(self, data_type, node=None):
 with When("I insert the output into a table"):
 node.query(f"INSERT INTO {table_name} SELECT map('key1', {to_data_type(data_type,1)}, 'key2', {to_data_type(data_type,2)})")
 
-execute_query(f"""
-    SELECT * FROM {table_name}
-    """)
+execute_query(f"SELECT * FROM {table_name}")
 
 with Scenario(f"mapAdd with {data_type}"):
-node.query(f"SELECT mapAdd(([{to_data_type(data_type,1)}, {to_data_type(data_type,2)}], [{to_data_type(data_type,1)}, {to_data_type(data_type,2)}]), ([{to_data_type(data_type,1)}, {to_data_type(data_type,2)}], [{to_data_type(data_type,1)}, {to_data_type(data_type,2)}]))",
-    exitcode = 44, message='Exception:')
+sql = f"SELECT mapAdd(([{to_data_type(data_type,1)}, {to_data_type(data_type,2)}],"
+    f"[{to_data_type(data_type,1)}, {to_data_type(data_type,2)}]),"
+    f"([{to_data_type(data_type,1)}, {to_data_type(data_type,2)}],"
+    f"[{to_data_type(data_type,1)}, {to_data_type(data_type,2)}]))"
+if data_type.startswith("Decimal"):
+    node.query(sql, exitcode=43, message="Exception:")
+else:
+    execute_query(sql)
 
 with Scenario(f"mapAdd with {data_type} on a table"):
 table_name = get_table_name()
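
Note: the rewritten mapAdd scenario above, and the mapSubtract and on-a-table variants in the following hunks, share one pattern: Decimal extended-precision types are still expected to fail with exitcode 43, while Int128/Int256/UInt128/UInt256 now succeed and are checked against the new snapshot entries via execute_query. A minimal sketch of that branch, with check_map_op as a hypothetical helper name (not part of the commit):

    def check_map_op(node, execute_query, sql, data_type):
        # Decimal* types: the map function is expected to raise (exitcode 43).
        # Other extended-precision types: run the query and snapshot-check the result.
        if data_type.startswith("Decimal"):
            node.query(sql, exitcode=43, message="Exception:")
        else:
            execute_query(sql)
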
@@ -369,16 +349,27 @@ def map_func(self, data_type, node=None):
 table(name = table_name, data_type = f'Tuple(Array({data_type}), Array({data_type}))')
 
 with When("I insert the output into a table"):
-node.query(f"INSERT INTO {table_name} SELECT mapAdd(([{to_data_type(data_type,1)}, {to_data_type(data_type,2)}], [{to_data_type(data_type,1)}, {to_data_type(data_type,2)}]), ([{to_data_type(data_type,1)}, {to_data_type(data_type,2)}], [{to_data_type(data_type,1)}, {to_data_type(data_type,2)}]))",
-    exitcode = 44, message='Exception:')
+sql = (f"INSERT INTO {table_name} SELECT mapAdd(([{to_data_type(data_type,1)},{to_data_type(data_type,2)}],"
+    f"[{to_data_type(data_type,1)}, {to_data_type(data_type,2)}]), ([{to_data_type(data_type,1)}, {to_data_type(data_type,2)}],"
+    f"[{to_data_type(data_type,1)}, {to_data_type(data_type,2)}]))")
+exitcode, message = 0, None
 
-execute_query(f"""
-    SELECT * FROM {table_name} ORDER BY a ASC
-    """)
+if data_type.startswith("Decimal"):
+    exitcode, message = 43, "Exception:"
+node.query(sql, exitcode=exitcode, message=message)
 
+execute_query(f"""SELECT * FROM {table_name} ORDER BY a ASC""")
+
 with Scenario(f"mapSubtract with {data_type}"):
-node.query(f"SELECT mapSubtract(([{to_data_type(data_type,1)}, {to_data_type(data_type,2)}], [{to_data_type(data_type,1)}, {to_data_type(data_type,2)}]), ([{to_data_type(data_type,1)}, {to_data_type(data_type,2)}], [{to_data_type(data_type,1)}, {to_data_type(data_type,2)}]))",
-    exitcode = 44, message='Exception:')
+sql = (f"SELECT mapSubtract(([{to_data_type(data_type,1)}, {to_data_type(data_type,2)}],"
+    f"[{to_data_type(data_type,1)}, {to_data_type(data_type,2)}]),"
+    f"([{to_data_type(data_type,1)}, {to_data_type(data_type,2)}],"
+    f"[{to_data_type(data_type,1)}, {to_data_type(data_type,2)}]))")
+
+if data_type.startswith("Decimal"):
+    node.query(sql, exitcode=43, message="Exception:")
+else:
+    execute_query(sql)
 
 with Scenario(f"mapSubtract with {data_type} on a table"):
 table_name = get_table_name()
@@ -386,15 +377,21 @@ def map_func(self, data_type, node=None):
 table(name = table_name, data_type = f'Tuple(Array({data_type}), Array({data_type}))')
 
 with When("I insert the output into a table"):
-node.query(f"INSERT INTO {table_name} SELECT mapSubtract(([{to_data_type(data_type,1)}, {to_data_type(data_type,2)}], [{to_data_type(data_type,1)}, {to_data_type(data_type,2)}]), ([{to_data_type(data_type,1)}, {to_data_type(data_type,2)}], [{to_data_type(data_type,1)}, {to_data_type(data_type,2)}]))",
-    exitcode = 44, message='Exception:')
+sql = (f"INSERT INTO {table_name} SELECT mapSubtract(([{to_data_type(data_type,1)},"
+    f"{to_data_type(data_type,2)}], [{to_data_type(data_type,1)},"
+    f"{to_data_type(data_type,2)}]), ([{to_data_type(data_type,1)},"
+    f"{to_data_type(data_type,2)}], [{to_data_type(data_type,1)}, {to_data_type(data_type,2)}]))")
+exitcode, message = 0, None
 
-execute_query(f"""
-    SELECT * FROM {table_name} ORDER BY a ASC
-    """)
+if data_type.startswith("Decimal"):
+    exitcode, message = 43, "Exception:"
+node.query(sql, exitcode=exitcode, message=message)
 
+execute_query(f"SELECT * FROM {table_name} ORDER BY a ASC")
+
 with Scenario(f"mapPopulateSeries with {data_type}"):
-node.query(f"SELECT mapPopulateSeries([1,2,3], [{to_data_type(data_type,1)}, {to_data_type(data_type,2)}, {to_data_type(data_type,3)}], 5)",
+node.query(f"SELECT mapPopulateSeries([1,2,3], [{to_data_type(data_type,1)},"
+    f"{to_data_type(data_type,2)}, {to_data_type(data_type,3)}], 5)",
 exitcode = 44, message='Exception:')
 
 with Scenario(f"mapPopulateSeries with {data_type} on a table"):

@@ -403,15 +400,15 @@ def map_func(self, data_type, node=None):
 table(name = table_name, data_type = f'Tuple(Array({data_type}), Array({data_type}))')
 
 with When("I insert the output into a table"):
-node.query(f"INSERT INTO {table_name} SELECT mapPopulateSeries([1,2,3], [{to_data_type(data_type,1)}, {to_data_type(data_type,2)}, {to_data_type(data_type,3)}], 5)",
+node.query(f"INSERT INTO {table_name} SELECT mapPopulateSeries([1,2,3],"
+    f"[{to_data_type(data_type,1)}, {to_data_type(data_type,2)}, {to_data_type(data_type,3)}], 5)",
 exitcode = 44, message='Exception:')
 
-execute_query(f"""
-    SELECT * FROM {table_name} ORDER BY a ASC
-    """)
+execute_query(f"SELECT * FROM {table_name} ORDER BY a ASC")
 
 with Scenario(f"mapContains with {data_type}"):
-node.query(f"SELECT mapContains( map('key1', {to_data_type(data_type,1)}, 'key2', {to_data_type(data_type,2)}), 'key1')")
+node.query(f"SELECT mapContains( map('key1', {to_data_type(data_type,1)},"
+    f"'key2', {to_data_type(data_type,2)}), 'key1')")
 
 with Scenario(f"mapContains with {data_type} on a table"):
 table_name = get_table_name()
@@ -419,11 +416,10 @@ def map_func(self, data_type, node=None):
 table(name = table_name, data_type = data_type)
 
 with When("I insert the output into a table"):
-node.query(f"INSERT INTO {table_name} SELECT mapContains( map('key1', {to_data_type(data_type,1)}, 'key2', {to_data_type(data_type,2)}), 'key1')")
+node.query(f"INSERT INTO {table_name} SELECT mapContains( map('key1', {to_data_type(data_type,1)},"
+    f"'key2', {to_data_type(data_type,2)}), 'key1')")
 
-execute_query(f"""
-    SELECT * FROM {table_name} ORDER BY a ASC
-    """)
+execute_query(f"SELECT * FROM {table_name} ORDER BY a ASC")
 
 with Scenario(f"mapKeys with {data_type}"):
 node.query(f"SELECT mapKeys( map('key1', {to_data_type(data_type,1)}, 'key2', {to_data_type(data_type,2)}))")

@@ -434,11 +430,10 @@ def map_func(self, data_type, node=None):
 table(name = table_name, data_type = 'Array(String)')
 
 with When("I insert the output into a table"):
-node.query(f"INSERT INTO {table_name} SELECT mapKeys( map('key1', {to_data_type(data_type,1)}, 'key2', {to_data_type(data_type,2)}))")
+node.query(f"INSERT INTO {table_name} SELECT mapKeys( map('key1', {to_data_type(data_type,1)},"
+    f"'key2', {to_data_type(data_type,2)}))")
 
-execute_query(f"""
-    SELECT * FROM {table_name} ORDER BY a ASC
-    """)
+execute_query(f"SELECT * FROM {table_name} ORDER BY a ASC")
 
 with Scenario(f"mapValues with {data_type}"):
 node.query(f"SELECT mapValues( map('key1', {to_data_type(data_type,1)}, 'key2', {to_data_type(data_type,2)}))")

@@ -449,11 +444,10 @@ def map_func(self, data_type, node=None):
 table(name = table_name, data_type = f'Array({data_type})')
 
 with When("I insert the output into a table"):
-node.query(f"INSERT INTO {table_name} SELECT mapValues( map('key1', {to_data_type(data_type,1)}, 'key2', {to_data_type(data_type,2)}))")
+node.query(f"INSERT INTO {table_name} SELECT mapValues( map('key1', {to_data_type(data_type,1)},"
+    f"'key2', {to_data_type(data_type,2)}))")
 
-execute_query(f"""
-    SELECT * FROM {table_name} ORDER BY a ASC
-    """)
+execute_query(f"SELECT * FROM {table_name} ORDER BY a ASC")
 
 @TestFeature
 @Name("array, tuple, map")

@@ -465,7 +459,8 @@ def map_func(self, data_type, node=None):
 ('Decimal256(0)',),
 ])
 def feature(self, node="clickhouse1", stress=None, parallel=None):
-"""Check that array, tuple, and map functions work with extended precision data types.
+"""Check that array, tuple, and map functions work with
+extended precision data types.
 """
 self.context.node = self.context.cluster.node(node)
 
@@ -2103,7 +2103,7 @@ def insert_on_source_table(self, grant_target_name, user_name, node=None):
 with When("I grant INSERT on the source table"):
 node.query(f"GRANT INSERT ON {table1_name} TO {grant_target_name}")
 with Then("I attempt to insert into the source table"):
-node.query(f"INSERT INTO {table1_name}(d) VALUES ('01-01-2020')", settings = [("user",f"{user_name}")])
+node.query(f"INSERT INTO {table1_name}(d) VALUES ('2020-01-01')", settings = [("user",f"{user_name}")])
 
 finally:
 with Finally("I drop the view"):

@@ -2152,7 +2152,7 @@ def insert_with_insert_privilege(self, grant_target_name, user_name, node=None):
 with When("I grant INSERT on the view"):
 node.query(f"GRANT INSERT ON {view_name} TO {grant_target_name}")
 with Then("I attempt to insert into the view"):
-node.query(f"INSERT INTO {view_name}(d) VALUES ('01-01-2020')",
+node.query(f"INSERT INTO {view_name}(d) VALUES ('2020-01-01')",
 settings = [("user",f"{user_name}")])
 
 finally:

@@ -2201,7 +2201,7 @@ def insert_on_target_table(self, grant_target_name, user_name, node=None):
 with When("I grant INSERT on the target table"):
 node.query(f"GRANT INSERT ON {table0_name} TO {grant_target_name}")
 with Then("I attempt to insert into the target table"):
-node.query(f"INSERT INTO {table0_name}(d) VALUES ('01-01-2020')", settings = [("user",f"{user_name}")])
+node.query(f"INSERT INTO {table0_name}(d) VALUES ('2020-01-01')", settings = [("user",f"{user_name}")])
 
 finally:
 with Finally("I drop the view"):

@@ -2248,7 +2248,7 @@ def insert_on_target_table(self, grant_target_name, user_name, node=None):
 with When("I grant INSERT on the target table"):
 node.query(f"GRANT INSERT ON {implicit_table_name} TO {grant_target_name}")
 with Then("I attempt to insert into the target table"):
-node.query(f"INSERT INTO {implicit_table_name}(d) VALUES ('01-01-2020')", settings = [("user",f"{user_name}")])
+node.query(f"INSERT INTO {implicit_table_name}(d) VALUES ('2020-01-01')", settings = [("user",f"{user_name}")])
 
 finally:
 with Finally("I drop the view"):
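
Note: the four hunks above only correct the inserted Date literal; ClickHouse expects Date text values in ISO YYYY-MM-DD form, so the tests now insert '2020-01-01' instead of '01-01-2020'. A tiny illustration with a placeholder table name (not one of the test's tables, whose (d) column is assumed to be a Date):

    table_name = "t1"  # placeholder for illustration only

    bad  = f"INSERT INTO {table_name}(d) VALUES ('01-01-2020')"  # day-month-year, not the expected Date text format
    good = f"INSERT INTO {table_name}(d) VALUES ('2020-01-01')"  # YYYY-MM-DD, the format ClickHouse parses for Date
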
@@ -23,14 +23,14 @@ def regression(self, local, clickhouse_binary_path, stress=None, parallel=None):
 with Pool(8) as pool:
 try:
 run_scenario(pool, tasks, Feature(test=load("example.regression", "regression")), args)
-#run_scenario(pool, tasks, Feature(test=load("ldap.regression", "regression")), args)
+run_scenario(pool, tasks, Feature(test=load("ldap.regression", "regression")), args)
-#run_scenario(pool, tasks, Feature(test=load("rbac.regression", "regression")), args)
+run_scenario(pool, tasks, Feature(test=load("rbac.regression", "regression")), args)
-#run_scenario(pool, tasks, Feature(test=load("aes_encryption.regression", "regression")), args)
+run_scenario(pool, tasks, Feature(test=load("aes_encryption.regression", "regression")), args)
-#run_scenario(pool, tasks, Feature(test=load("map_type.regression", "regression")), args)
+run_scenario(pool, tasks, Feature(test=load("map_type.regression", "regression")), args)
-#run_scenario(pool, tasks, Feature(test=load("window_functions.regression", "regression")), args)
+run_scenario(pool, tasks, Feature(test=load("window_functions.regression", "regression")), args)
-#run_scenario(pool, tasks, Feature(test=load("datetime64_extended_range.regression", "regression")), args)
+run_scenario(pool, tasks, Feature(test=load("datetime64_extended_range.regression", "regression")), args)
 #run_scenario(pool, tasks, Feature(test=load("kerberos.regression", "regression")), args)
-#run_scenario(pool, tasks, Feature(test=load("extended_precision_data_types.regression", "regression")), args)
+run_scenario(pool, tasks, Feature(test=load("extended_precision_data_types.regression", "regression")), args)
 finally:
 join(tasks)
 
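
Note: this hunk enables every previously commented-out suite except kerberos by uncommenting its run_scenario call. Purely as an illustrative alternative (not how the commit writes it), the same set could be driven from a list, assuming the run_scenario, load, Feature, Pool, join, tasks and args helpers already defined in this entry point:

    enabled_modules = [
        "example", "ldap", "rbac", "aes_encryption", "map_type",
        "window_functions", "datetime64_extended_range",
        # "kerberos",  # still disabled in this commit
        "extended_precision_data_types",
    ]

    with Pool(8) as pool:
        try:
            for module in enabled_modules:
                # Same call pattern as the diff, driven by a list instead of comments.
                run_scenario(pool, tasks, Feature(test=load(f"{module}.regression", "regression")), args)
        finally:
            join(tasks)
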
@@ -41,6 +41,8 @@ xfails = {
 [(Fail, "not supported, https://github.com/ClickHouse/ClickHouse/issues/19857")],
 "tests/:/misc/window functions in subquery":
 [(Fail, "not supported, https://github.com/ClickHouse/ClickHouse/issues/19857")],
+"tests/:/misc/in view":
+[(Fail, "bug, https://github.com/ClickHouse/ClickHouse/issues/26001")],
 "tests/:/frame clause/range frame/order by decimal":
 [(Fail, "Exception: The RANGE OFFSET frame for 'DB::ColumnDecimal<DB::Decimal<long> >' ORDER BY column is not implemented")],
 "tests/:/frame clause/range frame/with nulls":