Mirror of https://github.com/ClickHouse/ClickHouse.git (synced 2024-12-18 04:12:19 +00:00)

Merge pull request #22868 from CurtizJ/prettify-logs
Prettify logs of integration tests
Commit: 30f2d839c3
@@ -334,12 +334,11 @@ class ClickhouseIntegrationTestsRunner:
                 logging.info("Task timeout exceeded, skipping %s", test)
                 counters["SKIPPED"].append(test)
                 tests_times[test] = 0
-            log_name = None
-            log_path = None
-            return counters, tests_times, log_name, log_path
+            return counters, tests_times, []
 
         image_cmd = self._get_runner_image_cmd(repo_path)
         test_group_str = test_group.replace('/', '_').replace('.', '_')
+        log_paths = []
 
         for i in range(num_tries):
             logging.info("Running test group %s for the %s retry", test_group, i)
@@ -348,6 +347,7 @@ class ClickhouseIntegrationTestsRunner:
             output_path = os.path.join(str(self.path()), "test_output_" + test_group_str + "_" + str(i) + ".log")
             log_name = "integration_run_" + test_group_str + "_" + str(i) + ".txt"
             log_path = os.path.join(str(self.path()), log_name)
+            log_paths.append(log_path)
             logging.info("Will wait output inside %s", output_path)
 
             test_names = set([])
@@ -390,7 +390,7 @@ class ClickhouseIntegrationTestsRunner:
             if test not in counters["PASSED"] and test not in counters["ERROR"] and test not in counters["FAILED"]:
                 counters["ERROR"].append(test)
 
-        return counters, tests_times, log_name, log_path
+        return counters, tests_times, log_paths
 
     def run_flaky_check(self, repo_path, build_path):
         pr_info = self.params['pr_info']
@@ -408,12 +408,12 @@ class ClickhouseIntegrationTestsRunner:
         start = time.time()
         logging.info("Starting check with retries")
         final_retry = 0
-        log_paths = []
+        logs = []
         for i in range(TRIES_COUNT):
             final_retry += 1
             logging.info("Running tests for the %s time", i)
-            counters, tests_times, _, log_path = self.run_test_group(repo_path, "flaky", tests_to_run, 1)
-            log_paths.append(log_path)
+            counters, tests_times, log_paths = self.run_test_group(repo_path, "flaky", tests_to_run, 1)
+            logs += log_paths
             if counters["FAILED"]:
                 logging.info("Found failed tests: %s", ' '.join(counters["FAILED"]))
                 description_prefix = "Flaky tests found: "
@@ -450,7 +450,7 @@ class ClickhouseIntegrationTestsRunner:
             test_result += [(c + ' (✕' + str(final_retry) + ')', text_state, "{:.2f}".format(tests_times[c])) for c in counters[state]]
         status_text = description_prefix + ', '.join([str(n).lower().replace('failed', 'fail') + ': ' + str(len(c)) for n, c in counters.items()])
 
-        return result_state, status_text, test_result, [test_logs] + log_paths
+        return result_state, status_text, test_result, [test_logs] + logs
 
     def run_impl(self, repo_path, build_path):
         if self.flaky_check:
@@ -471,8 +471,8 @@ class ClickhouseIntegrationTestsRunner:
             "FLAKY": [],
         }
         tests_times = defaultdict(float)
+        tests_log_paths = defaultdict(list)
 
-        logs = []
         items_to_run = list(grouped_tests.items())
 
         logging.info("Total test groups %s", len(items_to_run))
@@ -482,7 +482,7 @@ class ClickhouseIntegrationTestsRunner:
 
         for group, tests in items_to_run:
             logging.info("Running test group %s countaining %s tests", group, len(tests))
-            group_counters, group_test_times, _, log_path = self.run_test_group(repo_path, group, tests, MAX_RETRY)
+            group_counters, group_test_times, log_paths = self.run_test_group(repo_path, group, tests, MAX_RETRY)
             total_tests = 0
             for counter, value in group_counters.items():
                 logging.info("Tests from group %s stats, %s count %s", group, counter, len(value))
@@ -493,7 +493,8 @@ class ClickhouseIntegrationTestsRunner:
 
             for test_name, test_time in group_test_times.items():
                 tests_times[test_name] = test_time
-            logs.append(log_path)
+                tests_log_paths[test_name] = log_paths
+
             if len(counters["FAILED"]) + len(counters["ERROR"]) >= 20:
                 logging.info("Collected more than 20 failed/error tests, stopping")
                 break
@@ -518,7 +519,7 @@ class ClickhouseIntegrationTestsRunner:
                 text_state = "FAIL"
             else:
                 text_state = state
-            test_result += [(c, text_state, "{:.2f}".format(tests_times[c])) for c in counters[state]]
+            test_result += [(c, text_state, "{:.2f}".format(tests_times[c]), tests_log_paths[c]) for c in counters[state]]
 
         failed_sum = len(counters['FAILED']) + len(counters['ERROR'])
         status_text = "fail: {}, passed: {}, flaky: {}".format(failed_sum, len(counters['PASSED']), len(counters['FLAKY']))
@@ -535,7 +536,7 @@ class ClickhouseIntegrationTestsRunner:
         if '(memory)' in self.params['context_name']:
             result_state = "success"
 
-        return result_state, status_text, test_result, [test_logs] + logs
+        return result_state, status_text, test_result, [test_logs]
 
 def write_results(results_file, status_file, results, status):
     with open(results_file, 'w') as f:
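
Taken together, the diff changes the runner's bookkeeping: run_test_group() now returns the list of per-retry log paths instead of a single log_name/log_path pair, run_impl() records that list per test in tests_log_paths, and each entry in test_result carries its own logs. Below is a minimal, self-contained sketch of that flow, with simplified signatures and hypothetical file names, not the real ci-runner code:

from collections import defaultdict

def run_test_group_sketch(group, tests, num_tries):
    """Simplified stand-in for run_test_group(): one log file per retry."""
    counters = {"PASSED": [], "FAILED": []}
    log_paths = []
    for i in range(num_tries):
        # hypothetical per-retry log name, mirroring "integration_run_<group>_<i>.txt"
        log_paths.append("integration_run_{}_{}.txt".format(group, i))
        counters["PASSED"] = list(tests)  # pretend the whole group passed on this try
        break  # stop retrying once the group is green
    return counters, log_paths

def run_impl_sketch(grouped_tests):
    """Simplified stand-in for run_impl(): attach the group's logs to each test."""
    tests_log_paths = defaultdict(list)
    test_result = []
    for group, tests in grouped_tests.items():
        counters, log_paths = run_test_group_sketch(group, tests, num_tries=3)
        for state, names in counters.items():
            for name in names:
                # every test in the group points at the same list of retry logs
                tests_log_paths[name] = log_paths
                test_result.append((name, state, tests_log_paths[name]))
    return test_result

if __name__ == "__main__":
    print(run_impl_sketch({"test_storage_s3": ["test_put", "test_get"]}))

With this shape, the report generator can link each failed test directly to the logs of the retries that ran its group, instead of a single group-wide log file.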