Mirror of https://github.com/ClickHouse/ClickHouse.git (synced 2024-11-22 23:52:03 +00:00)
Merge pull request #61658 from ClickHouse/ci_skip_never_fails_integr_test
CI: integration tests: use runner as py module
Commit 2db72e6588
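In short: integration_test_check.py previously launched tests/integration/ci-runner.py as a sudo subprocess through TeePopen and interpreted its exit code; after this change it imports integration_tests_runner and calls it in-process. Below is a minimal, hypothetical sketch of the two patterns, not the actual CI code: TeePopen is approximated with subprocess.Popen, and only names such as my_env, runner_path, and runner.run() come from the diff; the rest is illustrative.

import logging
import os
import subprocess
from pathlib import Path
from typing import Dict


def run_as_subprocess(repo_path: Path, result_path: Path, my_env: Dict[str, str]) -> int:
    # Old pattern: shell out to ci-runner.py with sudo and tee its output to a log file.
    runner_path = repo_path / "tests" / "integration" / "ci-runner.py"
    run_command = f"sudo -E {runner_path}"
    log_path = result_path / "main_script_log.txt"
    with open(log_path, "wb") as log:
        with subprocess.Popen(
            run_command, shell=True, env=my_env, stdout=log, stderr=subprocess.STDOUT
        ) as process:
            return process.wait()


def run_as_module(my_env: Dict[str, str]) -> None:
    # New pattern: export the parameters into the environment and call the
    # runner in the same process. integration_tests_runner lives in tests/ci/
    # and is assumed to be importable (i.e. tests/ci is on sys.path).
    import integration_tests_runner as runner

    for k, v in my_env.items():
        os.environ[k] = v
    try:
        runner.run()
    except Exception as e:
        logging.error("Exception: %s", e)
        raise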
@@ -385,7 +385,11 @@ upgrade_check_digest = DigestConfig(
     docker=["clickhouse/upgrade-check"],
 )
 integration_check_digest = DigestConfig(
-    include_paths=["./tests/ci/integration_test_check.py", "./tests/integration/"],
+    include_paths=[
+        "./tests/ci/integration_test_check.py",
+        "./tests/ci/integration_tests_runner.py",
+        "./tests/integration/",
+    ],
     exclude_files=[".md"],
     docker=IMAGES.copy(),
 )
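The hunk above widens the digest inputs for the integration check, so edits to the runner script now invalidate the job's cached result. A rough, hypothetical sketch of the idea behind include_paths/exclude_files (this is not ClickHouse's digest_helper implementation, and job_digest is an invented name):

import hashlib
from pathlib import Path
from typing import Iterable


def job_digest(include_paths: Iterable[str], exclude_suffixes: Iterable[str]) -> str:
    # Hash every file under the included paths, skipping excluded suffixes;
    # the CI can then skip a job whose digest has not changed since the last run.
    md5 = hashlib.md5()
    for root in sorted(include_paths):
        root_path = Path(root)
        files = [root_path] if root_path.is_file() else sorted(root_path.rglob("*"))
        for f in files:
            if not f.is_file() or any(f.name.endswith(s) for s in exclude_suffixes):
                continue
            md5.update(str(f).encode())
            md5.update(f.read_bytes())
    return md5.hexdigest()


# With "./tests/ci/integration_tests_runner.py" in include_paths, a change to
# the runner changes the digest and re-triggers the integration check.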
@@ -25,7 +25,8 @@ from report import (
     read_test_results,
 )
 from stopwatch import Stopwatch
 from tee_popen import TeePopen
+import integration_tests_runner as runner
 
 
 def get_json_params_dict(
@@ -206,11 +207,8 @@ def main():
     json_params.write(params_text)
     logging.info("Parameters file %s is written: %s", json_path, params_text)
 
-    output_path_log = result_path / "main_script_log.txt"
-
-    runner_path = repo_path / "tests" / "integration" / "ci-runner.py"
-    run_command = f"sudo -E {runner_path}"
-    logging.info("Going to run command: `%s`", run_command)
+    for k, v in my_env.items():
+        os.environ[k] = v
     logging.info(
         "ENV parameters for runner:\n%s",
         "\n".join(
@@ -218,31 +216,13 @@ def main():
         ),
     )
 
-    integration_infrastructure_fail = False
-    with TeePopen(run_command, output_path_log, my_env) as process:
-        retcode = process.wait()
-        if retcode == 0:
-            logging.info("Run tests successfully")
-        elif retcode == 13:
-            logging.warning(
-                "There were issues with infrastructure. Not writing status report to restart job."
-            )
-            integration_infrastructure_fail = True
-            sys.exit(1)
-        else:
-            logging.info("Some tests failed")
-
-    # subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {temp_path}", shell=True)
-
-    if not integration_infrastructure_fail:
-        state, description, test_results, additional_logs = process_results(result_path)
+    try:
+        runner.run()
+    except Exception as e:
+        logging.error("Exception: %s", e)
+        state, description, test_results, additional_logs = ERROR, "infrastructure error", [TestResult("infrastructure error", ERROR, stopwatch.duration_seconds)], []  # type: ignore
     else:
-        state, description, test_results, additional_logs = (
-            ERROR,
-            "no description",
-            [TestResult("infrastructure error", ERROR, stopwatch.duration_seconds)],
-            [],
-        )
+        state, description, test_results, additional_logs = process_results(result_path)
 
     JobReport(
         description=description,
@@ -250,7 +230,7 @@ def main():
         status=state,
         start_time=stopwatch.start_time_str,
         duration=stopwatch.duration_seconds,
-        additional_files=[output_path_log] + additional_logs,
+        additional_files=additional_logs,
     ).dump(to_file=args.report_to_file if args.report_to_file else None)
 
     if state != SUCCESS:
@@ -13,11 +13,13 @@ import string
 import subprocess
 import sys
 import time
+from typing import Any, Dict
 import zlib  # for crc32
 from collections import defaultdict
 from itertools import chain
 
 from integration_test_images import IMAGES
+from env_helper import CI
 
 MAX_RETRY = 1
 NUM_WORKERS = 5
@@ -102,7 +104,7 @@ def get_counters(fname):
         "PASSED": set([]),
         "FAILED": set([]),
         "SKIPPED": set([]),
-    }
+    }  # type: Dict[str, Any]
 
     with open(fname, "r", encoding="utf-8") as out:
         for line in out:
@@ -292,7 +294,7 @@ class ClickhouseIntegrationTestsRunner:
             return name + ":latest"
         return name
 
-    def get_image_version(self, name: str):
+    def get_image_version(self, name: str) -> Any:
         if name in self.image_versions:
             return self.image_versions[name]
         logging.warning(
@@ -380,15 +382,15 @@ class ClickhouseIntegrationTestsRunner:
         os.chmod(CLICKHOUSE_ODBC_BRIDGE_BINARY_PATH, 0o777)
         os.chmod(CLICKHOUSE_LIBRARY_BRIDGE_BINARY_PATH, 0o777)
         shutil.copy(
-            CLICKHOUSE_BINARY_PATH, os.getenv("CLICKHOUSE_TESTS_SERVER_BIN_PATH")
+            CLICKHOUSE_BINARY_PATH, os.getenv("CLICKHOUSE_TESTS_SERVER_BIN_PATH")  # type: ignore
         )
         shutil.copy(
             CLICKHOUSE_ODBC_BRIDGE_BINARY_PATH,
-            os.getenv("CLICKHOUSE_TESTS_ODBC_BRIDGE_BIN_PATH"),
+            os.getenv("CLICKHOUSE_TESTS_ODBC_BRIDGE_BIN_PATH"),  # type: ignore
         )
         shutil.copy(
             CLICKHOUSE_LIBRARY_BRIDGE_BINARY_PATH,
-            os.getenv("CLICKHOUSE_TESTS_LIBRARY_BRIDGE_BIN_PATH"),
+            os.getenv("CLICKHOUSE_TESTS_LIBRARY_BRIDGE_BIN_PATH"),  # type: ignore
         )
 
     @staticmethod
@@ -466,7 +468,7 @@ class ClickhouseIntegrationTestsRunner:
 
     @staticmethod
     def group_test_by_file(tests):
-        result = {}
+        result = {}  # type: Dict
         for test in tests:
             test_file = test.split("::")[0]
             if test_file not in result:
@@ -573,8 +575,8 @@ class ClickhouseIntegrationTestsRunner:
                 "PASSED": [],
                 "FAILED": [],
                 "SKIPPED": [],
-            }
-            tests_times = defaultdict(float)
+            }  # type: Dict
+            tests_times = defaultdict(float)  # type: Dict
             for test in tests_in_group:
                 counters["ERROR"].append(test)
                 tests_times[test] = 0
@@ -595,8 +597,8 @@ class ClickhouseIntegrationTestsRunner:
             "SKIPPED": [],
             "BROKEN": [],
             "NOT_FAILED": [],
-        }
-        tests_times = defaultdict(float)
+        }  # type: Dict
+        tests_times = defaultdict(float)  # type: Dict
 
         if self.soft_deadline_time < time.time():
             for test in tests_in_group:
@@ -894,7 +896,7 @@ class ClickhouseIntegrationTestsRunner:
             "SKIPPED": [],
             "BROKEN": [],
             "NOT_FAILED": [],
-        }
+        }  # type: Dict
         tests_times = defaultdict(float)
         tests_log_paths = defaultdict(list)
 
@@ -985,7 +987,7 @@ def write_results(results_file, status_file, results, status):
         out.writerow(status)
 
 
-if __name__ == "__main__":
+def run():
     logging.basicConfig(level=logging.INFO, format="%(asctime)s %(message)s")
 
     repo_path = os.environ.get("CLICKHOUSE_TESTS_REPO_PATH")
@@ -993,29 +995,36 @@ if __name__ == "__main__":
     result_path = os.environ.get("CLICKHOUSE_TESTS_RESULT_PATH")
     params_path = os.environ.get("CLICKHOUSE_TESTS_JSON_PARAMS_PATH")
 
+    assert params_path
     with open(params_path, "r", encoding="utf-8") as jfd:
         params = json.loads(jfd.read())
     runner = ClickhouseIntegrationTestsRunner(result_path, params)
 
     logging.info("Running tests")
 
-    # Avoid overlaps with previous runs
-    logging.info("Clearing dmesg before run")
-    subprocess.check_call(  # STYLE_CHECK_ALLOW_SUBPROCESS_CHECK_CALL
-        "dmesg --clear", shell=True
-    )
+    if CI:
+        # Avoid overlaps with previous runs
+        logging.info("Clearing dmesg before run")
+        subprocess.check_call(  # STYLE_CHECK_ALLOW_SUBPROCESS_CHECK_CALL
+            "sudo -E dmesg --clear", shell=True
+        )
 
     state, description, test_results, _ = runner.run_impl(repo_path, build_path)
     logging.info("Tests finished")
 
-    # Dump dmesg (to capture possible OOMs)
-    logging.info("Dumping dmesg")
-    subprocess.check_call(  # STYLE_CHECK_ALLOW_SUBPROCESS_CHECK_CALL
-        "dmesg -T", shell=True
-    )
+    if CI:
+        # Dump dmesg (to capture possible OOMs)
+        logging.info("Dumping dmesg")
+        subprocess.check_call(  # STYLE_CHECK_ALLOW_SUBPROCESS_CHECK_CALL
+            "sudo -E dmesg -T", shell=True
+        )
 
     status = (state, description)
     out_results_file = os.path.join(str(runner.path()), "test_results.tsv")
     out_status_file = os.path.join(str(runner.path()), "check_status.tsv")
     write_results(out_results_file, out_status_file, test_results, status)
     logging.info("Result written")
+
+
+if __name__ == "__main__":
+    run()