perf test error summary

Alexander Kuzmenkov 2020-08-05 18:46:05 +03:00
parent 63391bbb66
commit 7945041a9a


@@ -17,6 +17,8 @@ parser.add_argument('--report', default='main', choices=['main', 'all-queries'],
help='Which report to build')
args = parser.parse_args()
tables = []
errors_described = []
report_errors = []
error_tests = 0
slow_average_tests = 0
@@ -145,19 +147,40 @@ tr:nth-child(odd) td {{filter: brightness(90%);}}
table_anchor = 0
row_anchor = 0
def nextTableAnchor():
def currentTableAnchor():
global table_anchor
return f'{table_anchor}'
def newTableAnchor():
global table_anchor
table_anchor += 1
return str(table_anchor)
return currentTableAnchor()
def currentRowAnchor():
global row_anchor
global table_anchor
return f'{table_anchor}.{row_anchor}'
def nextRowAnchor():
global row_anchor
global table_anchor
return f'{table_anchor}.{row_anchor + 1}'
def setRowAnchor(anchor_row_part):
global row_anchor
global table_anchor
row_anchor = anchor_row_part
return currentRowAnchor()
def advanceRowAnchor():
global row_anchor
global table_anchor
row_anchor += 1
return str(table_anchor) + "." + str(row_anchor)
return currentRowAnchor()
def tr(x):
a = nextRowAnchor()
a = advanceRowAnchor()
#return '<tr onclick="location.href=\'#{a}\'" id={a}>{x}</tr>'.format(a=a, x=str(x))
return '<tr id={a}>{x}</tr>'.format(a=a, x=str(x))
@@ -180,8 +203,10 @@ def tableHeader(r):
return tr(''.join([th(f) for f in r]))
def tableStart(title):
anchor = nextTableAnchor();
cls = '-'.join(title.lower().split(' ')[:3]);
global table_anchor
table_anchor = cls
anchor = currentTableAnchor()
return f"""
<h2 id="{anchor}">
<a class="cancela" href="#{anchor}">{title}</a>
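Note on the reworked anchors: tableStart() now derives table_anchor from the first words of the caption, and every row emitted through tr() advances row_anchor, so each row gets a stable id of the form '<table-slug>.<row-number>'. A minimal illustrative trace (slug and numbers are examples, assuming row_anchor starts at 0 for this table):

    tableStart('Partial queries')   # table_anchor becomes 'partial-queries'
    tableHeader(columns)            # tr() -> advanceRowAnchor() -> row id 'partial-queries.1'
    tableRow(first_row)             # row id 'partial-queries.2'
    currentTableAnchor()            # 'partial-queries', used for whole-table links
    nextRowAnchor()                 # 'partial-queries.3', the id the next emitted row will get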
@@ -211,20 +236,23 @@ def htmlRows(n):
result += tableRow(row)
return result
def printSimpleTable(caption, columns, rows):
def addSimpleTable(caption, columns, rows, pos=None):
global tables
text = ''
if not rows:
return
print(tableStart(caption))
print(tableHeader(columns))
text += tableStart(caption)
text += tableHeader(columns)
for row in rows:
print(tableRow(row))
print(tableEnd())
text += tableRow(row)
text += tableEnd()
tables.insert(pos if pos else len(tables), text)
def print_tested_commits():
def add_tested_commits():
global report_errors
try:
printSimpleTable('Tested commits', ['Old', 'New'],
addSimpleTable('Tested commits', ['Old', 'New'],
[['<pre>{}</pre>'.format(x) for x in
[open('left-commit.txt').read(),
open('right-commit.txt').read()]]])
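The add_* helpers above no longer print tables directly; they build an HTML fragment and stash it in the global tables list, which the script renders in one pass at the end. An illustrative pair of calls (row data elided), assuming tables already holds the 'Tested commits' fragment at index 0:

    addSimpleTable('Metric changes',
                   ['Metric', 'Old median value', 'New median value',
                    'Relative difference', 'Times difference'], rows)      # appended at the end
    addSimpleTable('Error summary', ['Description'], errors_described, 1)  # pinned near the top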
@@ -235,7 +263,8 @@ def print_tested_commits():
*sys.exc_info()[:2])[-1])
pass
def print_report_errors():
def add_report_errors():
global tables
global report_errors
# Add the errors reported by the various steps of the comparison script
try:
@@ -246,67 +275,89 @@ def print_report_errors():
*sys.exc_info()[:2])[-1])
pass
if len(report_errors):
print(tableStart('Errors while building the report'))
print(tableHeader(['Error']))
for x in report_errors:
print(tableRow([x]))
print(tableEnd())
if not report_errors:
return
text = tableStart('Errors while building the report')
text += tableHeader(['Error'])
for x in report_errors:
text += tableRow([x])
text += tableEnd()
# Insert after Tested Commits
tables.insert(1, text)
errors_described.append([f'<a href="#{currentTableAnchor()}">There were some errors while building the report</a>']);
def add_errors_explained():
global tables
addSimpleTable('Error summary', ['Description'], errors_described, 1)
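Because the report body is printed only after all checks have run, the summary tables can be pinned near the top even though their contents are collected last. An illustrative ordering trace, assuming only these three tables exist and both error lists are non-empty:

    tables = ['<Tested commits>']
    tables.insert(1, '<Errors while building the report>')   # add_report_errors()
    tables.insert(1, '<Error summary>')                      # add_errors_explained(), called after it
    # final order: Tested commits, Error summary, Errors while building the report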
if args.report == 'main':
print(header_template.format())
print_tested_commits()
add_tested_commits()
run_error_rows = tsvRows('run-errors.tsv')
error_tests += len(run_error_rows)
printSimpleTable('Run errors', ['Test', 'Error'], run_error_rows)
addSimpleTable('Run errors', ['Test', 'Error'], run_error_rows)
if run_error_rows:
errors_described.append([f'<a href="#{currentTableAnchor()}">There were some errors while running the tests</a>']);
slow_on_client_rows = tsvRows('report/slow-on-client.tsv')
error_tests += len(slow_on_client_rows)
printSimpleTable('Slow on client',
addSimpleTable('Slow on client',
['Client time,&nbsp;s', 'Server time,&nbsp;s', 'Ratio', 'Test', 'Query'],
slow_on_client_rows)
if slow_on_client_rows:
errors_described.append([f'<a href="#{currentTableAnchor()}">Some queries are taking noticeable time client-side (missing `FORMAT Null`?)</a>']);
unmarked_short_rows = tsvRows('report/unmarked-short-queries.tsv')
error_tests += len(unmarked_short_rows)
printSimpleTable('Short queries not marked as short',
addSimpleTable('Short queries not marked as short',
['New client time, s', 'Test', '#', 'Query'],
unmarked_short_rows)
if unmarked_short_rows:
errors_described.append([f'<a href="#{currentTableAnchor()}">Some queries have short duration but are not explicitly marked as "short"</a>']);
def print_partial():
def add_partial():
rows = tsvRows('report/partial-queries-report.tsv')
if not rows:
return
global unstable_partial_queries, slow_average_tests
print(tableStart('Partial queries'))
global unstable_partial_queries, slow_average_tests, tables
text = tableStart('Partial queries')
columns = ['Median time, s', 'Relative time variance', 'Test', '#', 'Query']
print(tableHeader(columns))
text += tableHeader(columns)
attrs = ['' for c in columns]
for row in rows:
if float(row[1]) > 0.10:
attrs[1] = f'style="background: {color_bad}"'
unstable_partial_queries += 1
errors_described.append([f'<a href="#{nextRowAnchor()}">The query no. {row[3]} of test \'{row[2]}\' has excessive variance of run time. Keep it below 10%</a>'])
else:
attrs[1] = ''
if float(row[0]) > allowed_single_run_time:
attrs[0] = f'style="background: {color_bad}"'
errors_described.append([f'<a href="#{nextRowAnchor()}">The query no. {row[3]} of test \'{row[2]}\' is taking too long to run. Keep the run time below {allowed_single_run_time} seconds</a>'])
slow_average_tests += 1
else:
attrs[0] = ''
print(tableRow(row, attrs))
print(tableEnd())
text += tableRow(row, attrs)
text += tableEnd()
tables.append(text)
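The links appended to errors_described from inside the row loop rely on nextRowAnchor() returning the id that the very next tr() call will assign, so an 'Error summary' entry jumps straight to the offending row. A minimal illustrative pairing (the concrete anchor value is an example):

    link = f'<a href="#{nextRowAnchor()}">excessive variance</a>'   # e.g. '#partial-queries.5'
    text += tableRow(row, attrs)   # tr() -> advanceRowAnchor() assigns id 'partial-queries.5' to this row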
print_partial()
add_partial()
def print_changes():
def add_changes():
rows = tsvRows('report/changed-perf.tsv')
if not rows:
return
global faster_queries, slower_queries
global faster_queries, slower_queries, tables
print(tableStart('Changes in performance'))
text = tableStart('Changes in performance')
columns = [
'Old,&nbsp;s', # 0
'New,&nbsp;s', # 1
@@ -319,7 +370,7 @@ if args.report == 'main':
'Query', # 8
]
print(tableHeader(columns))
text += tableHeader(columns)
attrs = ['' for c in columns]
attrs[5] = None
@@ -331,18 +382,19 @@ if args.report == 'main':
else:
slower_queries += 1
attrs[2] = attrs[3] = f'style="background: {color_bad}"'
errors_described.append([f'<a href="#{nextRowAnchor()}">The query no. {row[7]} of test \'{row[6]}\' has slowed down</a>'])
else:
attrs[2] = attrs[3] = ''
print(tableRow(row, attrs))
text += tableRow(row, attrs)
print(tableEnd())
text += tableEnd()
tables.append(text)
print_changes()
add_changes()
def print_unstable_queries():
global unstable_queries
global very_unstable_queries
def add_unstable_queries():
global unstable_queries, very_unstable_queries, tables
unstable_rows = tsvRows('report/unstable-queries.tsv')
if not unstable_rows:
@@ -361,8 +413,8 @@ if args.report == 'main':
'Query' #7
]
print(tableStart('Unstable queries'))
print(tableHeader(columns))
text = tableStart('Unstable queries')
text += tableHeader(columns)
attrs = ['' for c in columns]
attrs[4] = None
@@ -373,21 +425,22 @@ if args.report == 'main':
else:
attrs[3] = ''
print(tableRow(r, attrs))
text += tableRow(r, attrs)
print(tableEnd())
text += tableEnd()
tables.append(text)
print_unstable_queries()
add_unstable_queries()
skipped_tests_rows = tsvRows('analyze/skipped-tests.tsv')
printSimpleTable('Skipped tests', ['Test', 'Reason'], skipped_tests_rows)
addSimpleTable('Skipped tests', ['Test', 'Reason'], skipped_tests_rows)
printSimpleTable('Test performance changes',
addSimpleTable('Test performance changes',
['Test', 'Queries', 'Unstable', 'Changed perf', 'Total not OK', 'Avg relative time diff'],
tsvRows('report/test-perf-changes.tsv'))
def print_test_times():
global slow_average_tests
def add_test_times():
global slow_average_tests, tables
rows = tsvRows('report/test-times.tsv')
if not rows:
return
@@ -403,8 +456,8 @@ if args.report == 'main':
'Shortest query<br>(sum for all runs),&nbsp;s', #7
]
print(tableStart('Test times'))
print(tableHeader(columns))
text = tableStart('Test times')
text += tableHeader(columns)
nominal_runs = 13 # FIXME pass this as an argument
total_runs = (nominal_runs + 1) * 2 # one prewarm run, two servers
@@ -414,22 +467,25 @@ if args.report == 'main':
# FIXME should be 15s max -- investigate parallel_insert
slow_average_tests += 1
attrs[6] = f'style="background: {color_bad}"'
errors_described.append([f'<a href="./all-queries.html#all-query-times.0">The test \'{r[0]}\' is too slow to run as a whole. Investigate whether the create and fill queries can be sped up</a>'])
else:
attrs[6] = ''
if float(r[5]) > allowed_single_run_time * total_runs:
slow_average_tests += 1
attrs[5] = f'style="background: {color_bad}"'
errors_described.append([f'<a href="./all-queries.html#all-query-times.0">Some query of the test \'{r[0]}\' is too slow to run. See the all queries report</a>'])
else:
attrs[5] = ''
print(tableRow(r, attrs))
text += tableRow(r, attrs)
print(tableEnd())
text += tableEnd()
tables.append(text)
print_test_times()
add_test_times()
def print_benchmark_results():
def add_benchmark_results():
if not os.path.isfile('benchmark/website-left.json'):
return
@@ -479,26 +535,33 @@ if args.report == 'main':
all_rows.append([row, attrs])
print(tableStart('Concurrent benchmarks'))
print(tableHeader(header))
text = tableStart('Concurrent benchmarks')
text += tableHeader(header)
for row, attrs in all_rows:
print(tableRow(row, attrs))
print(tableEnd())
text += tableRow(row, attrs)
text += tableEnd()
global tables
tables.append(text)
try:
print_benchmark_results()
add_benchmark_results()
except:
report_errors.append(
traceback.format_exception_only(
*sys.exc_info()[:2])[-1])
pass
printSimpleTable('Metric changes',
addSimpleTable('Metric changes',
['Metric', 'Old median value', 'New median value',
'Relative difference', 'Times difference'],
tsvRows('metrics/changes.tsv'))
print_report_errors()
add_report_errors()
add_errors_explained()
for t in tables:
print(t)
print("""
<p class="links">
@@ -559,9 +622,9 @@ elif args.report == 'all-queries':
print(header_template.format())
print_tested_commits()
add_tested_commits()
def print_all_queries():
def add_all_queries():
rows = tsvRows('report/all-queries.tsv')
if not rows:
return
@@ -579,8 +642,8 @@ elif args.report == 'all-queries':
'Query', #9
]
print(tableStart('All query times'))
print(tableHeader(columns))
text = tableStart('All query times')
text += tableHeader(columns)
attrs = ['' for c in columns]
attrs[0] = None
@@ -606,13 +669,15 @@ elif args.report == 'all-queries':
attrs[2] = ''
attrs[3] = ''
print(tableRow(r, attrs))
text += tableRow(r, attrs)
print(tableEnd())
text += tableEnd()
tables.append(text)
print_all_queries()
print_report_errors()
add_all_queries()
add_report_errors()
for t in tables:
print(t)
print("""
<p class="links">