#!/usr/bin/python3
import os
import sys
import itertools
import clickhouse_driver
import xml.etree.ElementTree as et
import argparse
import pprint
import string
import time
import traceback
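
# Timing helper: report how long each stage of the run takes, one TSV line per stage.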
stage_start_seconds = time.perf_counter()

def report_stage_end(stage_name):
    global stage_start_seconds
    print('{}\t{}'.format(stage_name, time.perf_counter() - stage_start_seconds))
    stage_start_seconds = time.perf_counter()

report_stage_end('start')
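
# Parse the command line and the test description file (an XML document).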
parser = argparse.ArgumentParser(description='Run performance test.')
# Explicitly decode files as UTF-8 because sometimes we have Russian characters in queries, and LANG=C is set.
parser.add_argument('file', metavar='FILE', type=argparse.FileType('r', encoding='utf-8'), nargs=1, help='test description file')
parser.add_argument('--host', nargs='*', default=['127.0.0.1', '127.0.0.1'], help="Server hostname. Parallel to '--port'.")
parser.add_argument('--port', nargs='*', default=[9001, 9002], help="Server port. Parallel to '--host'.")
parser.add_argument('--runs', type=int, default=int(os.environ.get('CHPC_RUNS', 7)), help='Number of query runs per server. Defaults to the CHPC_RUNS environment variable.')
parser.add_argument('--no-long', type=bool, default=True, help='Skip the tests tagged as long.')
args = parser.parse_args()

tree = et.parse(args.file[0])
root = tree.getroot()

# Skip long tests
for tag in root.findall('.//tag'):
    if tag.text == 'long':
        print('skipped\tTest is tagged as long.')
        sys.exit(0)

# Check main metric
main_metric_element = root.find('main_metric/*')
if main_metric_element is not None and main_metric_element.tag != 'min_time':
    raise Exception('Only the min_time main metric is supported. This test uses \'{}\''.format(main_metric_element.tag))

# FIXME another way to detect infinite tests. They should have an appropriate main_metric, but sometimes they don't.
infinite_sign = root.find('.//average_speed_not_changing_for_ms')
if infinite_sign is not None:
    raise Exception('Looks like the test is infinite (sign 1)')

# Open connections
servers = [{'host': host, 'port': port} for (host, port) in zip(args.host, args.port)]
connections = [clickhouse_driver.Client(**server) for server in servers]

for s in servers:
    print('server\t{}\t{}'.format(s['host'], s['port']))

report_stage_end('connect')

# Process query parameters
subst_elems = root.findall('substitutions/substitution')
available_parameters = {}  # { 'table': ['hits_10m', 'hits_100m'], ... }
for e in subst_elems:
    available_parameters[e.find('name').text] = [v.text for v in e.findall('values/value')]

# Take care to keep the order of queries -- sometimes we have DROP IF EXISTS
# followed by CREATE in the create queries section, so the order matters.
def substitute_parameters(query_templates):
    result = []
    for q in query_templates:
        keys = set(n for _, n, _, _ in string.Formatter().parse(q) if n)
        values = [available_parameters[k] for k in keys]
        result.extend([
            q.format(**dict(zip(keys, values_combo)))
            for values_combo in itertools.product(*values)])
    return result
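
# E.g. a template 'select 1 from {table}' with table = ['hits_10m', 'hits_100m'] expands
# into two queries, one per value; several parameters in one template expand to their
# cartesian product.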

report_stage_end('substitute')

# Run drop queries, ignoring errors. Do this before all other activity, because
# clickhouse_driver disconnects on error (this is not configurable), and the new
# connection loses the changes in settings.
drop_query_templates = [q.text for q in root.findall('drop_query')]
drop_queries = substitute_parameters(drop_query_templates)
for c in connections:
    for q in drop_queries:
        try:
            c.execute(q)
        except:
            pass

report_stage_end('drop1')

# Apply settings
settings = root.findall('settings/*')
for c in connections:
    for s in settings:
        c.execute("set {} = '{}'".format(s.tag, s.text))

report_stage_end('settings')

# Check tables that should exist. If they don't exist, just skip this test.
tables = [e.text for e in root.findall('preconditions/table_exists')]
for t in tables:
    for c in connections:
        try:
            res = c.execute("select 1 from {} limit 1".format(t))
        except:
            print('skipped\t' + traceback.format_exception_only(*sys.exc_info()[:2])[-1])
            traceback.print_exc()
            sys.exit(0)

report_stage_end('preconditions')

# Run create queries
create_query_templates = [q.text for q in root.findall('create_query')]
create_queries = substitute_parameters(create_query_templates)
for c in connections:
    for q in create_queries:
        c.execute(q)

# Run fill queries
fill_query_templates = [q.text for q in root.findall('fill_query')]
fill_queries = substitute_parameters(fill_query_templates)
for c in connections:
    for q in fill_queries:
        c.execute(q)

report_stage_end('fill')

# Run test queries
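
# Escape backslashes, tabs and newlines (and drop carriage returns) so a query fits
# into a single field of the TSV output.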
def tsv_escape(s):
    return s.replace('\\', '\\\\').replace('\t', '\\t').replace('\n', '\\n').replace('\r', '')

test_query_templates = [q.text for q in root.findall('query')]
test_queries = substitute_parameters(test_query_templates)

report_stage_end('substitute2')
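
# For every test query, print one TSV line per prewarm run and per measured run, plus
# one 'client-time' line with the client-side wall-clock time and the summed
# server-side time for the measured runs.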

for q in test_queries:
    # Prewarm: run once on both servers. Helps to bring the data into memory,
    # precompile the queries, etc.
    for conn_index, c in enumerate(connections):
        res = c.execute(q, query_id='prewarm {} {}'.format(0, q))
        print('prewarm\t' + tsv_escape(q) + '\t' + str(conn_index) + '\t' + str(c.last_query.elapsed))

    # Now, perform measured runs.
    # Track the time spent by the client to process this query, so that we can notice
    # the queries that take long to process on the client side, e.g. by sending
    # excessive data.
    start_seconds = time.perf_counter()
    server_seconds = 0
    for run in range(0, args.runs):
        for conn_index, c in enumerate(connections):
            res = c.execute(q)
            print('query\t' + tsv_escape(q) + '\t' + str(run) + '\t' + str(conn_index) + '\t' + str(c.last_query.elapsed))
            server_seconds += c.last_query.elapsed

    client_seconds = time.perf_counter() - start_seconds
    print('client-time\t{}\t{}\t{}'.format(tsv_escape(q), client_seconds, server_seconds))
2019-12-26 17:35:41 +00:00
2020-02-14 12:55:47 +00:00
report_stage_end ( ' benchmark ' )

# Run drop queries
drop_query_templates = [q.text for q in root.findall('drop_query')]
drop_queries = substitute_parameters(drop_query_templates)
for c in connections:
    for q in drop_queries:
        c.execute(q)

report_stage_end('drop2')