Multiple sqllogictest improvements

Yatsishin Ilya 2023-07-19 12:24:31 +00:00
parent b9b3222c6e
commit bb4924ab63
7 changed files with 87 additions and 11 deletions


@@ -96,5 +96,4 @@ rg -Fa "Fatal" /var/log/clickhouse-server/clickhouse-server.log ||:
 zstd < /var/log/clickhouse-server/clickhouse-server.log > /test_output/clickhouse-server.log.zst &
 # Compressed (FIXME: remove once only github actions will be left)
-rm /var/log/clickhouse-server/clickhouse-server.log
 mv /var/log/clickhouse-server/stderr.log /test_output/ ||:


@@ -62,7 +62,8 @@ def default_clickhouse_odbc_conn_str():
     return str(
         OdbcConnectingArgs.create_from_kw(
             dsn="ClickHouse DSN (ANSI)",
-            Url="http://localhost:8123/query?default_format=ODBCDriver2&default_table_engine=MergeTree&union_default_mode=DISTINCT&group_by_use_nulls=1&join_use_nulls=1&allow_create_index_without_type=1",
+            Timeout="300",
+            Url="http://localhost:8123/query?default_format=ODBCDriver2&default_table_engine=MergeTree&union_default_mode=DISTINCT&group_by_use_nulls=1&join_use_nulls=1&allow_create_index_without_type=1&create_index_ignore_unique=1",
         )
     )
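
The DSN above points the ODBC driver at ClickHouse's HTTP interface; the parameters after /query? (default_format, group_by_use_nulls, the newly added create_index_ignore_unique, and so on) are passed to the server as settings. A minimal sketch of assembling such a URL; the helper name and host/port defaults are illustrative, not part of the change:

from urllib.parse import urlencode

def clickhouse_http_url(host="localhost", port=8123, **settings):
    # Every keyword becomes a query-string parameter, which ClickHouse's HTTP
    # interface interprets as a per-session setting.
    base = f"http://{host}:{port}/query"
    return f"{base}?{urlencode(settings)}" if settings else base

print(clickhouse_http_url(default_format="ODBCDriver2", create_index_ignore_unique=1))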


@@ -186,10 +186,10 @@ def mode_check_statements(parser):
         out_stages_dir = os.path.join(out_dir, f"{args.mode}-stages")
-        complete_sqlite_dir = os.path.join(out_stages_dir, "complete-sqlite")
+        complete_sqlite_dir = os.path.join(out_stages_dir, "statements-sqlite")
         os.makedirs(complete_sqlite_dir, exist_ok=True)
-        reports["complete-sqlite"] = run_all_tests_in_parallel(
+        reports["statements-sqlite"] = run_all_tests_in_parallel(
             setup_kwargs=as_kwargs(
                 engine=Engines.SQLITE,
             ),
@@ -223,6 +223,62 @@ def mode_check_statements(parser):
     parser.set_defaults(func=calle)

+
+def mode_check_complete(parser):
+    parser.add_argument("--input-dir", metavar="DIR", required=True)
+    parser.add_argument("--out-dir", metavar="DIR", required=True)
+
+    def calle(args):
+        input_dir = os.path.realpath(args.input_dir)
+        out_dir = os.path.realpath(args.out_dir)
+        if not os.path.exists(input_dir):
+            raise FileNotFoundError(
+                input_dir, f"check statements: no such file or directory {input_dir}"
+            )
+        if not os.path.isdir(input_dir):
+            raise NotADirectoryError(
+                input_dir, f"check statements:: not a dir {input_dir}"
+            )
+        reports = dict()
+        out_stages_dir = os.path.join(out_dir, f"{args.mode}-stages")
+        complete_sqlite_dir = os.path.join(out_stages_dir, "complete-sqlite")
+        os.makedirs(complete_sqlite_dir, exist_ok=True)
+        reports["complete-sqlite"] = run_all_tests_in_parallel(
+            setup_kwargs=as_kwargs(
+                engine=Engines.SQLITE,
+            ),
+            runner_kwargs=as_kwargs(
+                verify_mode=False,
+                stop_at_statement_error=True,
+            ),
+            input_dir=input_dir,
+            output_dir=complete_sqlite_dir,
+        )
+        verify_clickhouse_dir = os.path.join(out_stages_dir, "complete-clickhouse")
+        os.makedirs(verify_clickhouse_dir, exist_ok=True)
+        reports["complete-clickhouse"] = run_all_tests_in_parallel(
+            setup_kwargs=as_kwargs(
+                engine=Engines.ODBC,
+                conn_str=default_clickhouse_odbc_conn_str(),
+            ),
+            runner_kwargs=as_kwargs(
+                verify_mode=True,
+                stop_at_statement_error=True,
+            ),
+            input_dir=complete_sqlite_dir,
+            output_dir=verify_clickhouse_dir,
+        )
+        statements_report(reports, out_dir, args.mode)
+
+    parser.set_defaults(func=calle)
+
+
 def make_actual_report(reports):
     return {stage: report.get_map() for stage, report in reports.items()}
@@ -399,16 +455,22 @@ def parse_args():
     )
     subparsers = parser.add_subparsers(dest="mode")
+    mode_check_complete(
+        subparsers.add_parser(
+            "complete-test",
+            help="Run all tests. Check that all statements and queries are passed",
+        )
+    )
     mode_check_statements(
         subparsers.add_parser(
             "statements-test",
-            help="Run all test. Check that all statements are passed",
+            help="Run all tests. Check that all statements are passed",
         )
     )
     mode_self_test(
         subparsers.add_parser(
             "self-test",
-            help="Run all test. Check that all statements are passed",
+            help="Run all tests. Check that all statements are passed",
         )
     )
     args = parser.parse_args()
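
For context, every mode here follows the same argparse pattern: a mode_* helper registers a sub-command, attaches its options, and stores its handler through set_defaults(func=...); the entry point is then assumed to dispatch on args.func. A minimal, self-contained sketch of that wiring (the sub-command name "self-test" appears in the file above; the option and handler body here are invented for illustration):

import argparse

def mode_self_test(parser):
    # Each mode owns its options and registers a callback for dispatch.
    parser.add_argument("--out-dir", metavar="DIR", required=True)
    parser.set_defaults(func=lambda args: print("self-test ->", args.out_dir))

def parse_args():
    parser = argparse.ArgumentParser(description="sqllogictest runner (sketch)")
    subparsers = parser.add_subparsers(dest="mode", required=True)
    mode_self_test(subparsers.add_parser("self-test", help="Run the self checks"))
    return parser.parse_args()

if __name__ == "__main__":
    args = parse_args()
    args.func(args)  # run the handler chosen by the sub-command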

File diff suppressed because one or more lines are too long


@@ -142,4 +142,13 @@ SELECT number+1 from system.numbers LIMIT 20
 ----
 20 values hashing to 52c46dff81346ead02fcf6245c762b1a
+
+# Debug how incorrect result type parses
+statement ok
+CREATE TABLE tab0(pk INTEGER PRIMARY KEY, col0 INTEGER, col1 FLOAT, col2 TEXT, col3 INTEGER, col4 FLOAT, col5 TEXT)
+statement ok
+INSERT INTO tab0 VALUES(0,535,860.48,'uxbns',253,640.58,'jvqkl')
+skipif ClickHouse
+query I rowsort label-20
+SELECT + col2 AS col5 FROM tab0 WHERE NOT ( col0 ) * - - col4 IS NULL
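
The "20 values hashing to 52c46dff81346ead02fcf6245c762b1a" line above is sqllogictest's compact result form: the expected output is recorded as a value count plus an MD5 digest rather than as the values themselves. A rough sketch of how such a digest can be produced, assuming the usual canonicalization (each value rendered on its own line and newline-terminated before hashing); the framework's exact rules, including row sorting, are not reproduced here:

from hashlib import md5

def result_digest(values):
    # One value per line, newline-terminated, hashed as a single blob.
    blob = "".join(f"{v}\n" for v in values)
    return len(values), md5(blob.encode()).hexdigest()

count, digest = result_digest(range(1, 21))  # e.g. the 20 values produced above
print(f"{count} values hashing to {digest}")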


@@ -9,7 +9,7 @@ from enum import Enum
 from hashlib import md5
 from functools import reduce

-from exceptions import Error, ProgramError, ErrorWithParent, DataResultDiffer
+from exceptions import Error, ProgramError, ErrorWithParent, DataResultDiffer, QueryExecutionError

 logger = logging.getLogger("parser")
@@ -480,6 +480,7 @@ class QueryResult:
         for row in rows:
             res_row = []
             for c, t in zip(row, types):
+                logger.debug(f"Building row. c:{c} t:{t}")
                 if c is None:
                     res_row.append("NULL")
                     continue
@@ -490,7 +491,10 @@
                     else:
                         res_row.append(str(c))
                 elif t == "I":
-                    res_row.append(str(int(c)))
+                    try:
+                        res_row.append(str(int(c)))
+                    except ValueError as ex:
+                        raise QueryExecutionError(f"Got non-integer result '{c}' for I type.")
                 elif t == "R":
                     res_row.append(f"{c:.3f}")
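
The new try/except makes the "I" branch raise QueryExecutionError instead of an unhandled ValueError when a cell cannot be converted to an integer. A stripped-down sketch of the per-cell rendering that branch belongs to, assuming only the sqllogictest type letters T (text), I (integer) and R (real, three decimals) plus a stand-in for the imported exception class:

class QueryExecutionError(Exception):
    """Stand-in for the class imported from exceptions.py."""

def render_cell(c, t):
    # NULL is spelled out regardless of the declared column type.
    if c is None:
        return "NULL"
    if t == "T":
        return str(c)
    if t == "I":
        try:
            return str(int(c))
        except ValueError:
            raise QueryExecutionError(f"Got non-integer result '{c}' for I type.")
    if t == "R":
        return f"{float(c):.3f}"
    raise QueryExecutionError(f"Unknown type char '{t}'")

print([render_cell(c, t) for c, t in zip((535, 860.48, "uxbns", None), "IRTI")])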


@@ -361,7 +361,7 @@ class TestRunner:
                 continue

             if block.get_block_type() == test_parser.BlockType.control:
-                clogger.debug("Skip control block", name_pos)
+                clogger.debug("Skip control block %s", name_pos)
                 block.dump_to(out_stream)
                 continue
@@ -374,13 +374,14 @@ class TestRunner:
                 continue

             request = block.get_request()
-            exec_res = execute_request(request, self.connection)

             if block.get_block_type() in self.skip_request_types:
                 clogger.debug("Runtime skip block for %s", self.dbms_name)
                 block.dump_to(out_stream)
                 continue

+            exec_res = execute_request(request, self.connection)
+
             if block.get_block_type() == test_parser.BlockType.statement:
                 try:
                     clogger.debug("this is statement")