2019-12-26 17:35:41 +00:00
#!/usr/bin/python3
2020-01-21 18:15:25 +00:00
import os
2020-01-16 14:29:30 +00:00
import sys
2019-12-26 17:35:41 +00:00
import itertools
import clickhouse_driver
import xml . etree . ElementTree as et
import argparse
import pprint
2020-03-03 13:38:45 +00:00
import string
2020-01-16 19:39:07 +00:00
import time
import traceback
2019-12-26 17:35:41 +00:00
2020-02-14 12:55:47 +00:00
# Wall-clock timestamp of the most recent stage boundary; updated by
# report_stage_end() so consecutive calls measure per-stage durations.
stage_start_seconds = time.perf_counter()

def report_stage_end(stage_name):
    """Print '<stage>\t<seconds>' for the time elapsed since the previous
    stage boundary, then restart the stage timer."""
    global stage_start_seconds
    elapsed = time.perf_counter() - stage_start_seconds
    print('{}\t{}'.format(stage_name, elapsed))
    stage_start_seconds = time.perf_counter()

report_stage_end('start')
2019-12-26 17:35:41 +00:00
parser = argparse.ArgumentParser(description='Run performance test.')
# Explicitly decode files as UTF-8 because sometimes we have Russian characters in queries, and LANG=C is set.
parser.add_argument('file', metavar='FILE', type=argparse.FileType('r', encoding='utf-8'), nargs=1, help='test description file')
parser.add_argument('--host', nargs='*', default=['localhost'], help="Server hostname(s). Corresponds to '--port' options.")
parser.add_argument('--port', nargs='*', default=[9000], help="Server port(s). Corresponds to '--host' options.")
parser.add_argument('--runs', type=int, default=int(os.environ.get('CHPC_RUNS', 13)), help='Number of query runs per server. Defaults to CHPC_RUNS environment variable.')
# NOTE(review): argparse `type=bool` is a known pitfall -- bool('False') is
# True, so any non-empty value enables the flag. Kept for CLI compatibility;
# pass an empty string (--no-long '') to disable the skip.
parser.add_argument('--no-long', type=bool, default=True, help='Skip the tests tagged as long.')

args = parser.parse_args()

# Parse the test description XML.
tree = et.parse(args.file[0])
root = tree.getroot()

# Skip long tests when requested.
# FIX: previously the `--no-long` option was parsed but never consulted --
# tests tagged 'long' were skipped unconditionally. Honor the flag (its
# default of True preserves the old behavior).
if args.no_long:
    for tag in root.findall('.//tag'):
        if tag.text == 'long':
            print('skipped\tTest is tagged as long.')
            sys.exit(0)
2019-12-26 17:35:41 +00:00
# Validate the test's main metric: only min_time is supported by this runner.
main_metric_element = root.find('main_metric/*')
unsupported_metric = (main_metric_element is not None
                      and main_metric_element.tag != 'min_time')
if unsupported_metric:
    raise Exception('Only the min_time main metric is supported. This test uses \'{}\''.format(main_metric_element.tag))

# FIXME another way to detect infinite tests. They should have an appropriate
# main_metric but sometimes they don't.
infinite_sign = root.find('.//average_speed_not_changing_for_ms')
if infinite_sign is not None:
    raise Exception('Looks like the test is infinite (sign 1)')

# Print report threshold for the test if it is set.
if 'max_ignored_relative_change' in root.attrib:
    print(f'report-threshold\t{root.attrib["max_ignored_relative_change"]}')
2019-12-26 17:35:41 +00:00
# Open one connection per server; hosts and ports are paired positionally.
servers = []
for host, port in zip(args.host, args.port):
    servers.append({'host': host, 'port': port})

connections = [clickhouse_driver.Client(**server) for server in servers]

for s in servers:
    print('server\t{}\t{}'.format(s['host'], s['port']))

report_stage_end('connect')
2020-02-14 12:55:47 +00:00
2020-03-03 13:38:45 +00:00
# Process query parameters: each <substitution> element declares a parameter
# name and the list of values it can take.
subst_elems = root.findall('substitutions/substitution')
available_parameters = {}  # { 'table': ['hits_10m', 'hits_100m'], ... }
for e in subst_elems:
    name = e.find('name').text
    values = [v.text for v in e.findall('values/value')]
    available_parameters[name] = values
2019-12-26 17:35:41 +00:00
2020-02-21 19:58:52 +00:00
# Take care to keep the order of queries -- sometimes we have DROP IF EXISTS
# followed by CREATE in create queries section, so the order matters.
def substitute_parameters(query_templates, parameters=None):
    """Expand {param} placeholders in each template with every combination of
    the parameter values, preserving template order.

    query_templates -- list of str.format()-style templates.
    parameters -- optional dict {name: [values]}; defaults to the module-level
                  available_parameters parsed from the test description.

    Returns the flat list of fully substituted queries. Raises KeyError if a
    template references an undeclared parameter.
    """
    if parameters is None:
        parameters = available_parameters
    result = []
    for q in query_templates:
        # Collect placeholder names in order of first appearance.
        # FIX: the previous set() gave an arbitrary iteration order, making the
        # order of generated queries nondeterministic across runs.
        keys = list(dict.fromkeys(
            n for _, n, _, _ in string.Formatter().parse(q) if n))
        values = [parameters[k] for k in keys]
        result.extend(
            q.format(**dict(zip(keys, values_combo)))
            for values_combo in itertools.product(*values))
    return result
2019-12-26 17:35:41 +00:00
2020-02-14 12:55:47 +00:00
report_stage_end('substitute')

# Run drop queries, ignoring errors. Do this before all other activity, because
# clickhouse_driver disconnects on error (this is not configurable), and the new
# connection loses the changes in settings.
drop_query_templates = [q.text for q in root.findall('drop_query')]
drop_queries = substitute_parameters(drop_query_templates)
for c in connections:
    for q in drop_queries:
        try:
            c.execute(q)
        except Exception:
            # Best effort: the tables may legitimately not exist yet.
            # FIX: narrowed from a bare `except:` so KeyboardInterrupt and
            # SystemExit are no longer swallowed.
            pass

report_stage_end('drop1')

# Apply settings from the test description to every connection.
settings = root.findall('settings/*')
for c in connections:
    for s in settings:
        c.execute("set {} = '{}'".format(s.tag, s.text))

report_stage_end('settings')
# Check tables that should exist. If they don't exist, just skip this test.
tables = [e.text for e in root.findall('preconditions/table_exists')]
for t in tables:
    for c in connections:
        try:
            res = c.execute("select 1 from {} limit 1".format(t))
        except Exception:
            # FIX: narrowed from a bare `except:` -- a missing table raises a
            # driver exception (subclass of Exception); KeyboardInterrupt and
            # SystemExit should propagate instead of turning into a skip.
            print('skipped\t' + traceback.format_exception_only(*sys.exc_info()[:2])[-1])
            traceback.print_exc()
            sys.exit(0)

report_stage_end('preconditions')
2019-12-26 17:35:41 +00:00
# Run create queries (order preserved: DROP IF EXISTS may precede CREATE).
create_query_templates = [q.text for q in root.findall('create_query')]
create_queries = substitute_parameters(create_query_templates)
for c in connections:
    for q in create_queries:
        c.execute(q)

# Run fill queries to populate the freshly created tables.
fill_query_templates = [q.text for q in root.findall('fill_query')]
fill_queries = substitute_parameters(fill_query_templates)
for c in connections:
    for q in fill_queries:
        c.execute(q)

report_stage_end('fill')
2019-12-26 17:35:41 +00:00
# Run test queries
2019-12-26 21:33:10 +00:00
def tsv_escape(s):
    """Escape a string for a TSV report cell: double backslashes, turn tabs
    and newlines into their two-character escapes, drop carriage returns."""
    # Backslash must be handled first so later escapes aren't double-escaped.
    for old, new in (('\\', '\\\\'), ('\t', '\\t'), ('\n', '\\n'), ('\r', '')):
        s = s.replace(old, new)
    return s
2019-12-26 17:35:41 +00:00
# Expand the test queries with all parameter combinations.
test_query_templates = [q.text for q in root.findall('query')]
test_queries = substitute_parameters(test_query_templates)

report_stage_end('substitute2')
2020-04-29 09:28:12 +00:00
for i, q in enumerate(test_queries):
    # We have some crazy long queries (about 100kB), so trim them to a sane
    # length for display/reporting purposes.
    query_display_name = q
    if len(query_display_name) > 1000:
        query_display_name = f'{query_display_name[:1000]}...({i})'

    # Prewarm: run once on both servers. Helps to bring the data into memory,
    # precompile the queries, etc.
    try:
        for conn_index, c in enumerate(connections):
            res = c.execute(q, query_id=f'prewarm {0} {query_display_name}')
            print(f'prewarm\t{tsv_escape(query_display_name)}\t{conn_index}\t{c.last_query.elapsed}')
    except:
        # If prewarm fails for some query -- skip it, and try to test the others.
        # This might happen if the new test introduces some function that the
        # old server doesn't support. Still, report it as an error.
        # FIXME the driver reconnects on error and we lose settings, so this might
        # lead to further errors or unexpected behavior.
        print(traceback.format_exc(), file=sys.stderr)
        continue

    # Now, perform measured runs.
    # Track the time spent by the client to process this query, so that we can notice
    # out the queries that take long to process on the client side, e.g. by sending
    # excessive data.
    start_seconds = time.perf_counter()
    server_seconds = 0
    for run in range(0, args.runs):
        for conn_index, c in enumerate(connections):
            res = c.execute(q)
            print(f'query\t{tsv_escape(query_display_name)}\t{run}\t{conn_index}\t{c.last_query.elapsed}')
            server_seconds += c.last_query.elapsed

    client_seconds = time.perf_counter() - start_seconds
    print(f'client-time\t{tsv_escape(query_display_name)}\t{client_seconds}\t{server_seconds}')

report_stage_end('benchmark')
2019-12-26 17:35:41 +00:00
# Run drop queries once more to clean up after the test.
drop_query_templates = [q.text for q in root.findall('drop_query')]
drop_queries = substitute_parameters(drop_query_templates)
for c in connections:
    for q in drop_queries:
        c.execute(q)

report_stage_end('drop2')