mirror of https://github.com/ClickHouse/ClickHouse.git
Bugfix validate meta check
This commit is contained in:
    parent a4c410a1f5
    commit 2551adc416
.github/workflows/pull_request.yml (vendored): 58 lines changed
@@ -1733,17 +1733,17 @@ jobs:
           docker kill "$(docker ps -q)" ||:
           docker rm -f "$(docker ps -a -q)" ||:
           sudo rm -fr "$TEMP_PATH"
-  FunctionalStatelessTestBugfixCheck:
-    runs-on: [self-hosted, func-tester]
+  TestsBugfixCheck:
+    runs-on: [self-hosted, stress-tester]
     steps:
       - name: Set envs
         run: |
           cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/stateless_bugfix_asan
+          TEMP_PATH=${{runner.temp}}/tests_bugfix_check
           REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Stateless tests bugfix validate check (address, actions)
-          REPO_COPY=${{runner.temp}}/stateless_bugfix_asan/ClickHouse
+          CHECK_NAME=Tests bugfix validate check (actions)
           KILL_TIMEOUT=3600
+          REPO_COPY=${{runner.temp}}/tests_bugfix_check/ClickHouse
           EOF
       - name: Download json reports
         uses: actions/download-artifact@v2
@@ -1754,13 +1754,24 @@ jobs:
           sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
       - name: Check out repository code
         uses: actions/checkout@v2
-      - name: Functional test
+      - name: Bugfix test
        run: |
           sudo rm -fr "$TEMP_PATH"
           mkdir -p "$TEMP_PATH"
           cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
           cd "$REPO_COPY/tests/ci"
-          python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" --validate-bugfix
+
+          TEMP_PATH="${TEMP_PATH}/integration" \
+            REPORTS_PATH="${REPORTS_PATH}/integration" \
+            python3 integration_test_check.py "Integration tests bugfix validate check" \
+              --validate-bugfix --post-commit-status=file || echo 'ignore exit code'
+
+          TEMP_PATH="${TEMP_PATH}/stateless" \
+            REPORTS_PATH="${REPORTS_PATH}/stateless" \
+            python3 functional_test_check.py "Stateless tests bugfix validate check" "$KILL_TIMEOUT" \
+              --validate-bugfix --post-commit-status=file || echo 'ignore exit code'
+
+          python3 bugfix_validate_check.py "${TEMP_PATH}/stateless/post_commit_status.tsv" "${TEMP_PATH}/integration/post_commit_status.tsv"
       - name: Cleanup
         if: always()
         run: |
@@ -2726,39 +2737,6 @@ jobs:
           docker kill "$(docker ps -q)" ||:
           docker rm -f "$(docker ps -a -q)" ||:
           sudo rm -fr "$TEMP_PATH"
-  IntegrationTestsBugfixCheck:
-    runs-on: [self-hosted, stress-tester]
-    steps:
-      - name: Set envs
-        run: |
-          cat >> "$GITHUB_ENV" << 'EOF'
-          TEMP_PATH=${{runner.temp}}/integration_tests_asan_bugfix_check
-          REPORTS_PATH=${{runner.temp}}/reports_dir
-          CHECK_NAME=Integration tests bugfix validate check (asan, actions)
-          REPO_COPY=${{runner.temp}}/integration_tests_asan_bugfix_check/ClickHouse
-          EOF
-      - name: Download json reports
-        uses: actions/download-artifact@v2
-        with:
-          path: ${{ env.REPORTS_PATH }}
-      - name: Clear repository
-        run: |
-          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
-      - name: Check out repository code
-        uses: actions/checkout@v2
-      - name: Integration test
-        run: |
-          sudo rm -fr "$TEMP_PATH"
-          mkdir -p "$TEMP_PATH"
-          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-          cd "$REPO_COPY/tests/ci"
-          python3 integration_test_check.py "$CHECK_NAME" --validate-bugfix
-      - name: Cleanup
-        if: always()
-        run: |
-          docker kill "$(docker ps -q)" ||:
-          docker rm -f "$(docker ps -a -q)" ||:
-          sudo rm -fr "$TEMP_PATH"
 #############################################################################################
 #################################### UNIT TESTS #############################################
 #############################################################################################
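The new Bugfix test step above deliberately swallows the exit codes of the two sub-checks (`|| echo 'ignore exit code'`) so that both can still write their post_commit_status.tsv files; only the final bugfix_validate_check.py call decides the step's result. A minimal sketch of that pattern in Python, with hypothetical stand-in commands rather than the real CI scripts:

#!/usr/bin/env python3
# Sketch only: run two steps whose failures are tolerated, then let a final
# aggregation step determine the overall exit status. "false"/"true" are
# hypothetical stand-ins for the integration and stateless bugfix checks.
import subprocess
import sys

for cmd in (["false"], ["true"]):
    subprocess.run(cmd, check=False)  # equivalent of `<check> || echo 'ignore exit code'`

# Stand-in for: python3 bugfix_validate_check.py <stateless.tsv> <integration.tsv>
sys.exit(subprocess.run(["true"], check=False).returncode)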
tests/ci/bugfix_validate_check.py (new file): 46 lines added
@@ -0,0 +1,46 @@
+#!/usr/bin/env python3
+
+import argparse
+import csv
+import itertools
+import os
+import sys
+
+NO_CHANGES_MSG = 'Nothing to run'
+
+
+def parse_args():
+    parser = argparse.ArgumentParser()
+    parser.add_argument('report1')
+    parser.add_argument('report2')
+    return parser.parse_args()
+
+
+def post_commit_status_from_file(file_path):
+    res = []
+    with open(file_path, 'r', encoding='utf-8') as f:
+        fin = csv.reader(f, delimiter='\t')
+        res = list(itertools.islice(fin, 1))
+    if len(res) < 1:
+        raise Exception(f'Can\'t read from "{file_path}"')
+    if len(res[0]) != 3:
+        raise Exception(f'Can\'t read from "{file_path}"')
+    return res[0]
+
+
+def process_results(file_path):
+    state, report_url, description = post_commit_status_from_file(file_path)
+    prefix = os.path.basename(os.path.dirname(file_path))
+    print(f'::notice:: bugfix check: {prefix} - {state}: {description} Report url: {report_url}')
+    return state == 'success'
+
+
+def main(args):
+    is_ok = False
+    is_ok = process_results(args.report1) or is_ok
+    is_ok = process_results(args.report2) or is_ok
+    sys.exit(0 if is_ok else 1)
+
+
+if __name__ == '__main__':
+    main(parse_args())
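The aggregation rule in main() above is an OR across the two reports: the script exits 0 as long as at least one of the two status files reports "success". A minimal sketch of that decision, with hypothetical states in place of files read from disk:

# Sketch only: mirrors the `or` chain in main(); the states below are hypothetical
# stand-ins for the first column of the two post_commit_status.tsv reports.
states = {"stateless": "failure", "integration": "success"}
is_ok = any(state == "success" for state in states.values())
exit_code = 0 if is_ok else 1  # main() calls sys.exit(0 if is_ok else 1)
print(exit_code)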
tests/ci/commit_status_helper.py

@@ -1,6 +1,8 @@
 #!/usr/bin/env python3

 import time
+import os
+import csv
 from env_helper import GITHUB_REPOSITORY
 from ci_config import CI_CONFIG

@@ -49,3 +51,11 @@ def post_commit_status(gh, sha, check_name, description, state, report_url):
             if i == RETRY - 1:
                 raise ex
             time.sleep(i)
+
+
+def post_commit_status_to_file(file_path, description, state, report_url):
+    if os.path.exists(file_path):
+        raise Exception(f'File "{file_path}" already exists!')
+    with open(file_path, 'w', encoding='utf-8') as f:
+        out = csv.writer(f, delimiter='\t')
+        out.writerow([state, report_url, description])
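post_commit_status_to_file writes a single tab-separated row in the order [state, report_url, description], which is exactly the shape post_commit_status_from_file in the new bugfix_validate_check.py reads back. A minimal round-trip sketch, with a hypothetical path and values:

# Sketch only: write a status row the way post_commit_status_to_file does and read
# it back the way post_commit_status_from_file does. Path and values are hypothetical.
import csv

path = "/tmp/post_commit_status.tsv"
with open(path, "w", encoding="utf-8") as f:
    csv.writer(f, delimiter="\t").writerow(["success", "https://example.com/report", "Nothing to run"])

with open(path, "r", encoding="utf-8") as f:
    state, report_url, description = next(csv.reader(f, delimiter="\t"))
assert (state, description) == ("success", "Nothing to run")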
tests/ci/functional_test_check.py

@@ -17,12 +17,15 @@ from build_download_helper import download_all_deb_packages
 from download_previous_release import download_previous_release
 from upload_result_helper import upload_results
 from docker_pull_helper import get_image_with_version
-from commit_status_helper import post_commit_status, get_commit, override_status
+from commit_status_helper import post_commit_status, get_commit, override_status, post_commit_status_to_file
 from clickhouse_helper import ClickHouseHelper, mark_flaky_tests, prepare_tests_results_for_clickhouse
 from stopwatch import Stopwatch
 from rerun_helper import RerunHelper
 from tee_popen import TeePopen

+NO_CHANGES_MSG = 'Nothing to run'
+
+
 def get_additional_envs(check_name, run_by_hash_num, run_by_hash_total):
     result = []
     if 'DatabaseReplicated' in check_name:

@@ -135,6 +138,7 @@ def parse_args():
     parser.add_argument("check_name")
     parser.add_argument("kill_timeout", type=int)
     parser.add_argument("--validate-bugfix", action='store_true', help="Check that added tests failed on latest stable")
+    parser.add_argument("--post-commit-status", default='commit_status', choices=['commit_status', 'file'], help="Where to public post commit status")
     return parser.parse_args()

@@ -186,7 +190,11 @@ if __name__ == "__main__":
     if not tests_to_run:
         commit = get_commit(gh, pr_info.sha)
         state = override_status('success', check_name, validate_bugix_check)
-        commit.create_status(context=check_name_with_group, description='Not found changed stateless tests', state=state)
+        if args.post_commit_status == 'commit_status':
+            commit.create_status(context=check_name_with_group, description=NO_CHANGES_MSG, state=state)
+        elif args.post_commit_status == 'file':
+            fpath = os.path.join(temp_path, "post_commit_status.tsv")
+            post_commit_status_to_file(fpath, description=NO_CHANGES_MSG, state=state, report_url='null')
         sys.exit(0)

     image_name = get_image_name(check_name)

@@ -239,8 +247,13 @@ if __name__ == "__main__":

     report_url = upload_results(s3_helper, pr_info.number, pr_info.sha, test_results, [run_log_path] + additional_logs, check_name_with_group)

-    print(f"::notice ::Report url: {report_url}")
-    post_commit_status(gh, pr_info.sha, check_name_with_group, description, state, report_url)
+    print(f"::notice:: {check_name} Report url: {report_url}")
+    if args.post_commit_status == 'commit_status':
+        post_commit_status(gh, pr_info.sha, check_name_with_group, description, state, report_url)
+    elif args.post_commit_status == 'file':
+        post_commit_status_to_file(os.path.join(temp_path, "post_commit_status.tsv"), description, state, report_url)
+    else:
+        raise Exception(f'Unknown post_commit_status option "{args.post_commit_status}"')

     prepared_events = prepare_tests_results_for_clickhouse(pr_info, test_results, state, stopwatch.duration_seconds, stopwatch.start_time_str, report_url, check_name_with_group)
     ch_helper.insert_events_into(db="gh-data", table="checks", events=prepared_events)
tests/ci/integration_test_check.py

@@ -18,7 +18,7 @@ from build_download_helper import download_all_deb_packages
 from download_previous_release import download_previous_release
 from upload_result_helper import upload_results
 from docker_pull_helper import get_images_with_versions
-from commit_status_helper import post_commit_status, override_status
+from commit_status_helper import post_commit_status, override_status, post_commit_status_to_file
 from clickhouse_helper import ClickHouseHelper, mark_flaky_tests, prepare_tests_results_for_clickhouse
 from stopwatch import Stopwatch
 from rerun_helper import RerunHelper

@@ -109,6 +109,7 @@ def parse_args():
     parser = argparse.ArgumentParser()
     parser.add_argument("check_name")
     parser.add_argument("--validate-bugfix", action='store_true', help="Check that added tests failed on latest stable")
+    parser.add_argument("--post-commit-status", default='commit_status', choices=['commit_status', 'file'], help="Where to public post commit status")
     return parser.parse_args()

@@ -198,8 +199,14 @@ if __name__ == "__main__":

     s3_helper = S3Helper('https://s3.amazonaws.com')
     report_url = upload_results(s3_helper, pr_info.number, pr_info.sha, test_results, [output_path_log] + additional_logs, check_name_with_group, False)
-    print(f"::notice ::Report url: {report_url}")
-    post_commit_status(gh, pr_info.sha, check_name_with_group, description, state, report_url)
+
+    print(f"::notice:: {check_name} Report url: {report_url}")
+    if args.post_commit_status == 'commit_status':
+        post_commit_status(gh, pr_info.sha, check_name_with_group, description, state, report_url)
+    elif args.post_commit_status == 'file':
+        post_commit_status_to_file(os.path.join(temp_path, "post_commit_status.tsv"), description, state, report_url)
+    else:
+        raise Exception(f'Unknown post_commit_status option "{args.post_commit_status}"')

     prepared_events = prepare_tests_results_for_clickhouse(pr_info, test_results, state, stopwatch.duration_seconds, stopwatch.start_time_str, report_url, check_name_with_group)
     ch_helper.insert_events_into(db="gh-data", table="checks", events=prepared_events)
Integration tests runner (class ClickhouseIntegrationTestsRunner)

@@ -27,6 +27,8 @@ MAX_TIME_SECONDS = 3600
 MAX_TIME_IN_SANDBOX = 20 * 60 # 20 minutes
 TASK_TIMEOUT = 8 * 60 * 60 # 8 hours

+NO_CHANGES_MSG = 'Nothing to run'
+

 def stringhash(s):
     return zlib.crc32(s.encode("utf-8"))

@@ -710,7 +712,7 @@ class ClickhouseIntegrationTestsRunner:
         tests_to_run = get_changed_tests_to_run(pr_info, repo_path)
         if not tests_to_run:
             logging.info("No tests to run found")
-            return "success", "Nothing to run", [("Nothing to run", "OK")], ""
+            return "success", NO_CHANGES_MSG, [(NO_CHANGES_MSG, "OK")], ""

         self._install_clickhouse(build_path)
         logging.info("Found '%s' tests to run", " ".join(tests_to_run))