commit a828e3e923 (parent e198b20509)

    test

.github/workflows/pr.yaml (vendored), 108 changed lines:
@@ -31,8 +31,7 @@ jobs:
       - name: Checkout code
         uses: actions/checkout@v4
         with:
-          clear-repository: true
-          ref: ${{ github.event.pull_reguest.head.sha }}
+          ref: ${{ github.head_ref }}

       - name: Prepare env script
         run: |
@@ -72,8 +71,7 @@ jobs:
       - name: Checkout code
         uses: actions/checkout@v4
         with:
-          clear-repository: true
-          ref: ${{ github.event.pull_reguest.head.sha }}
+          ref: ${{ github.head_ref }}

       - name: Prepare env script
         run: |
@@ -113,8 +111,7 @@ jobs:
      - name: Checkout code
         uses: actions/checkout@v4
         with:
-          clear-repository: true
-          ref: ${{ github.event.pull_reguest.head.sha }}
+          ref: ${{ github.head_ref }}

       - name: Prepare env script
         run: |
@@ -154,8 +151,7 @@ jobs:
       - name: Checkout code
         uses: actions/checkout@v4
         with:
-          clear-repository: true
-          ref: ${{ github.event.pull_reguest.head.sha }}
+          ref: ${{ github.head_ref }}

       - name: Prepare env script
         run: |
@@ -195,8 +191,7 @@ jobs:
       - name: Checkout code
         uses: actions/checkout@v4
         with:
-          clear-repository: true
-          ref: ${{ github.event.pull_reguest.head.sha }}
+          ref: ${{ github.head_ref }}

       - name: Prepare env script
         run: |
@@ -236,8 +231,7 @@ jobs:
       - name: Checkout code
         uses: actions/checkout@v4
         with:
-          clear-repository: true
-          ref: ${{ github.event.pull_reguest.head.sha }}
+          ref: ${{ github.head_ref }}

       - name: Prepare env script
         run: |
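Note: the identical two-line checkout change repeats in every job because this YAML appears to be stamped from a single template (see the yaml_generator hunk near the end, where the same `ref:` line is edited once). The removed expression also misspells `pull_request`, which an Actions expression silently resolves to an empty string. In the template the quadruple braces are Python `str.format` escapes; a minimal sketch of how they render (illustrative only):

    # each '{{' renders as '{' under str.format, so '${{{{ github.head_ref }}}}'
    # becomes the literal Actions expression '${{ github.head_ref }}'
    CHECKOUT_BLOCK = """\
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          ref: ${{{{ github.head_ref }}}}
    {JOB_ADDONS}"""
    print(CHECKOUT_BLOCK.format(JOB_ADDONS=""))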
@ -266,19 +260,18 @@ jobs:
|
||||
python3 -m praktika run --job '''Build (amd_release)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log
|
||||
fi
|
||||
|
||||
stateless_tests_amd_debug_parallel_1_2:
|
||||
stateless_tests_amd_debugparallel:
|
||||
runs-on: [builder]
|
||||
needs: [config_workflow, docker_builds, build_amd_debug]
|
||||
if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWQsIGRlYnVnKSAocGFyYWxsZWwgMS8yKQ==') }}
|
||||
name: "Stateless tests (amd, debug) (parallel 1/2)"
|
||||
if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfZGVidWcscGFyYWxsZWwp') }}
|
||||
name: "Stateless tests (amd_debug,parallel)"
|
||||
outputs:
|
||||
data: ${{ steps.run.outputs.DATA }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
clear-repository: true
|
||||
ref: ${{ github.event.pull_reguest.head.sha }}
|
||||
ref: ${{ github.head_ref }}
|
||||
|
||||
- name: Prepare env script
|
||||
run: |
|
||||
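Note: the `cache_success_base64` tokens are simply the job names, base64-encoded, so every rename needs a matching new token. A quick check in plain Python (illustrative, not part of the diff):

    import base64
    name = "Stateless tests (amd_debug,parallel)"
    print(base64.b64encode(name.encode()).decode())
    # -> U3RhdGVsZXNzIHRlc3RzIChhbWRfZGVidWcscGFyYWxsZWwp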
@ -302,24 +295,63 @@ jobs:
|
||||
. /tmp/praktika_setup_env.sh
|
||||
set -o pipefail
|
||||
if command -v ts &> /dev/null; then
|
||||
python3 -m praktika run --job '''Stateless tests (amd, debug) (parallel 1/2)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log
|
||||
python3 -m praktika run --job '''Stateless tests (amd_debug,parallel)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log
|
||||
else
|
||||
python3 -m praktika run --job '''Stateless tests (amd, debug) (parallel 1/2)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log
|
||||
python3 -m praktika run --job '''Stateless tests (amd_debug,parallel)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log
|
||||
fi
|
||||
|
||||
stateless_tests_amd_debug_parallel_2_2:
|
||||
stateless_tests_amd_debugnon_parallel:
|
||||
runs-on: [func-tester]
|
||||
needs: [config_workflow, docker_builds, build_amd_debug]
|
||||
if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfZGVidWcsbm9uLXBhcmFsbGVsKQ==') }}
|
||||
name: "Stateless tests (amd_debug,non-parallel)"
|
||||
outputs:
|
||||
data: ${{ steps.run.outputs.DATA }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ github.head_ref }}
|
||||
|
||||
- name: Prepare env script
|
||||
run: |
|
||||
cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF'
|
||||
export PYTHONPATH=./ci:.
|
||||
|
||||
cat > /tmp/praktika/workflow_config_pr.json << 'EOF'
|
||||
${{ needs.config_workflow.outputs.data }}
|
||||
EOF
|
||||
cat > /tmp/praktika/workflow_status.json << 'EOF'
|
||||
${{ toJson(needs) }}
|
||||
EOF
|
||||
ENV_SETUP_SCRIPT_EOF
|
||||
|
||||
rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika
|
||||
mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output
|
||||
|
||||
- name: Run
|
||||
id: run
|
||||
run: |
|
||||
. /tmp/praktika_setup_env.sh
|
||||
set -o pipefail
|
||||
if command -v ts &> /dev/null; then
|
||||
python3 -m praktika run --job '''Stateless tests (amd_debug,non-parallel)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log
|
||||
else
|
||||
python3 -m praktika run --job '''Stateless tests (amd_debug,non-parallel)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log
|
||||
fi
|
||||
|
||||
stateless_tests_amd_releaseparallel:
|
||||
runs-on: [builder]
|
||||
needs: [config_workflow, docker_builds, build_amd_debug]
|
||||
if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWQsIGRlYnVnKSAocGFyYWxsZWwgMi8yKQ==') }}
|
||||
name: "Stateless tests (amd, debug) (parallel 2/2)"
|
||||
needs: [config_workflow, docker_builds, build_amd_release]
|
||||
if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfcmVsZWFzZSxwYXJhbGxlbCk=') }}
|
||||
name: "Stateless tests (amd_release,parallel)"
|
||||
outputs:
|
||||
data: ${{ steps.run.outputs.DATA }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
clear-repository: true
|
||||
ref: ${{ github.event.pull_reguest.head.sha }}
|
||||
ref: ${{ github.head_ref }}
|
||||
|
||||
- name: Prepare env script
|
||||
run: |
|
||||
@ -343,24 +375,23 @@ jobs:
|
||||
. /tmp/praktika_setup_env.sh
|
||||
set -o pipefail
|
||||
if command -v ts &> /dev/null; then
|
||||
python3 -m praktika run --job '''Stateless tests (amd, debug) (parallel 2/2)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log
|
||||
python3 -m praktika run --job '''Stateless tests (amd_release,parallel)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log
|
||||
else
|
||||
python3 -m praktika run --job '''Stateless tests (amd, debug) (parallel 2/2)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log
|
||||
python3 -m praktika run --job '''Stateless tests (amd_release,parallel)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log
|
||||
fi
|
||||
|
||||
stateless_tests_amd_debug_non_parallel:
|
||||
runs-on: [style-checker]
|
||||
needs: [config_workflow, docker_builds, build_amd_debug]
|
||||
if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWQsIGRlYnVnKSAobm9uLXBhcmFsbGVsKQ==') }}
|
||||
name: "Stateless tests (amd, debug) (non-parallel)"
|
||||
stateless_tests_amd_releasenon_parallel:
|
||||
runs-on: [func-tester]
|
||||
needs: [config_workflow, docker_builds, build_amd_release]
|
||||
if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWRfcmVsZWFzZSxub24tcGFyYWxsZWwp') }}
|
||||
name: "Stateless tests (amd_release,non-parallel)"
|
||||
outputs:
|
||||
data: ${{ steps.run.outputs.DATA }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
clear-repository: true
|
||||
ref: ${{ github.event.pull_reguest.head.sha }}
|
||||
ref: ${{ github.head_ref }}
|
||||
|
||||
- name: Prepare env script
|
||||
run: |
|
||||
@@ -384,14 +415,14 @@ jobs:
           . /tmp/praktika_setup_env.sh
           set -o pipefail
           if command -v ts &> /dev/null; then
-            python3 -m praktika run --job '''Stateless tests (amd, debug) (non-parallel)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log
+            python3 -m praktika run --job '''Stateless tests (amd_release,non-parallel)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log
           else
-            python3 -m praktika run --job '''Stateless tests (amd, debug) (non-parallel)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log
+            python3 -m praktika run --job '''Stateless tests (amd_release,non-parallel)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log
           fi

   finish_workflow:
     runs-on: [ci_services]
-    needs: [config_workflow, docker_builds, style_check, fast_test, build_amd_debug, build_amd_release, stateless_tests_amd_debug_parallel_1_2, stateless_tests_amd_debug_parallel_2_2, stateless_tests_amd_debug_non_parallel]
+    needs: [config_workflow, docker_builds, style_check, fast_test, build_amd_debug, build_amd_release, stateless_tests_amd_debugparallel, stateless_tests_amd_debugnon_parallel, stateless_tests_amd_releaseparallel, stateless_tests_amd_releasenon_parallel]
     if: ${{ !cancelled() }}
     name: "Finish Workflow"
     outputs:
@@ -400,8 +431,7 @@ jobs:
       - name: Checkout code
         uses: actions/checkout@v4
         with:
-          clear-repository: true
-          ref: ${{ github.event.pull_reguest.head.sha }}
+          ref: ${{ github.head_ref }}

       - name: Prepare env script
         run: |
----

@@ -1,6 +1,5 @@
 import argparse

-from praktika.param import get_param
 from praktika.result import Result
 from praktika.settings import Settings
 from praktika.utils import MetaClasses, Shell, Utils
@@ -16,8 +15,7 @@ def parse_args():
     parser = argparse.ArgumentParser(description="ClickHouse Build Job")
     parser.add_argument(
         "--build-type",
-        help="Type: <amd|arm>_<debug|release>_<asan|msan|..>",
-        default=None,
+        help="Type: <amd|arm>,<debug|release>,<asan|msan|..>",
     )
     parser.add_argument(
         "--param",
@@ -30,7 +28,7 @@ def parse_args():
 CMAKE_CMD = """cmake --debug-trycompile -DCMAKE_VERBOSE_MAKEFILE=1 -LA \
 -DCMAKE_BUILD_TYPE={BUILD_TYPE} \
 -DSANITIZE={SANITIZER} \
--DENABLE_CHECK_HEAVY_BUILDS=1 -DENABLE_CLICKHOUSE_SELF_EXTRACTING=1 -DENABLE_TESTS=0 \
+-DENABLE_CHECK_HEAVY_BUILDS=1 -DENABLE_CLICKHOUSE_SELF_EXTRACTING=1 \
 -DENABLE_UTILS=0 -DCMAKE_FIND_PACKAGE_NO_PACKAGE_REGISTRY=ON -DCMAKE_INSTALL_PREFIX=/usr \
 -DCMAKE_INSTALL_SYSCONFDIR=/etc -DCMAKE_INSTALL_LOCALSTATEDIR=/var -DCMAKE_SKIP_INSTALL_ALL_DEPENDENCY=ON \
 {AUX_DEFS} \
@@ -54,33 +52,26 @@ def main():
         stages.pop(0)
         stages.insert(0, stage)

-    cmake_build_type = "Release"
-    sanitizer = ""
-
-    if args.build_type and get_param():
-        assert (
-            False
-        ), "Build type must provided via job parameter (CI case) or via --build-type input argument not both"
-
-    build_type = args.build_type or get_param()
-
-    # if Environment.is_local_run():
-    #     build_cache_type = "disabled"
-    # else:
+    build_type = args.build_type
+    assert (
+        build_type
+    ), "build_type must be provided either as input argument or as a parameter of parametrized job in CI"
+    build_type = build_type.lower()
+
     CACHE_TYPE = "sccache"

     if "debug" in build_type:
         print("Build type set: debug")
         BUILD_TYPE = "Debug"
-        AUX_DEFS = " -DSPLIT_DEBUG_SYMBOLS=ON -DBUILD_STANDALONE_KEEPER=1 "
+        AUX_DEFS = (
+            " -DENABLE_TESTS=1 -DSPLIT_DEBUG_SYMBOLS=ON -DBUILD_STANDALONE_KEEPER=1 "
+        )
     elif "release" in build_type:
         print("Build type set: release")
-        BUILD_TYPE = "None"
-        AUX_DEFS = " -DENABLE_TESTS=1 "
+        BUILD_TYPE = "RelWithDebInfo"
+        AUX_DEFS = " -DENABLE_TESTS=0 "
     else:
         assert False

     if "asan" in build_type:
         print("Sanitizer set: address")
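This and the next hunk appear to be the build job script (its path, ./ci/jobs/build_clickhouse.py, is added to the job's digest_config in the workflow config further down). The old flow read the build type from praktika's get_param(); now it arrives as a CLI flag, substituted into the command when the parametrized job is expanded (see the praktika Job hunk below). A sketch of that substitution:

    # {PARAMETER} is filled in by Job.Config.parametrize via str.format:
    command = "python3 ./ci/jobs/build_clickhouse.py --build-type {PARAMETER}"
    print(command.format(PARAMETER="amd_debug"))
    # -> python3 ./ci/jobs/build_clickhouse.py --build-type amd_debug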
@@ -136,6 +127,7 @@ def main():
     Shell.check(f"ls -l {build_dir}/programs/")
     res = results[-1].is_ok()


     Result.create_from(results=results, stopwatch=stop_watch).complete_job()

----
@@ -215,11 +215,13 @@ def main():
         )

     if res and JobStages.TEST in stages:
         stop_watch_ = Utils.Stopwatch()
         step_name = "Tests"
         print(step_name)
-        results.append(FTResultsProcessor(wd=Settings.OUTPUT_DIR).run())
-        results[-1].set_timing(stopwatch=stop_watch_)
+        res = res and CH.run_fast_test()
+        if res:
+            results.append(FTResultsProcessor(wd=Settings.OUTPUT_DIR).run())
+            results[-1].set_timing(stopwatch=stop_watch_)

     CH.terminate()
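The guard introduced here means result parsing only happens when the test run itself succeeded, so FTResultsProcessor never tries to parse output that does not exist:

    res = res and CH.run_fast_test()   # short-circuits if an earlier stage already failed
    if res:
        results.append(FTResultsProcessor(wd=Settings.OUTPUT_DIR).run())
        results[-1].set_timing(stopwatch=stop_watch_)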
----

@@ -1,15 +1,13 @@
 import argparse
 import os
 import time
 from pathlib import Path

-from praktika.param import get_param
 from praktika.result import Result
 from praktika.settings import Settings
 from praktika.utils import MetaClasses, Shell, Utils

 from ci.jobs.scripts.clickhouse_proc import ClickHouseProc
 from ci.jobs.scripts.functional_tests_results import FTResultsProcessor
-from ci.settings.definitions import azure_secret


 class JobStages(metaclass=MetaClasses.WithIter):
@@ -21,9 +19,14 @@ class JobStages(metaclass=MetaClasses.WithIter):
 def parse_args():
     parser = argparse.ArgumentParser(description="ClickHouse Build Job")
     parser.add_argument(
-        "BUILD_TYPE", help="Type: <amd|arm>_<debug|release>_<asan|tsan|..>"
+        "--ch-path", help="Path to clickhouse binary", default=f"{Settings.INPUT_DIR}"
     )
-    parser.add_argument("--param", help="Optional custom job start stage", default=None)
+    parser.add_argument(
+        "--test-options",
+        help="Comma separated option(s): parallel|non-parallel|BATCH_NUM/BTATCH_TOT|..",
+        default="",
+    )
+    parser.add_argument("--param", help="Optional job start stage", default=None)
     return parser.parse_args()
@@ -50,28 +53,31 @@ def run_stateless_test(
 def main():

     args = parse_args()
-    params = get_param().split(" ")
-    parallel_or_sequential = None
-    no_parallel = False
-    no_sequential = False
-    if params:
-        parallel_or_sequential = params[0]
-    if len(params) > 1:
-        batch_num, total_batches = map(int, params[1].split("/"))
-    else:
-        batch_num, total_batches = 0, 0
-    if parallel_or_sequential:
-        no_parallel = parallel_or_sequential == "non-parallel"
-        no_sequential = parallel_or_sequential == "parallel"
+    test_options = args.test_options.split(",")
+    no_parallel = "non-parallel" in test_options
+    no_sequential = "parallel" in test_options
+    batch_num, total_batches = 0, 0
+    for to in test_options:
+        if "/" in to:
+            batch_num, total_batches = map(int, to.split("/"))

-    os.environ["AZURE_CONNECTION_STRING"] = Shell.get_output(
-        f"aws ssm get-parameter --region us-east-1 --name azure_connection_string --with-decryption --output text --query Parameter.Value",
-        verbose=True,
-    )
+    # os.environ["AZURE_CONNECTION_STRING"] = Shell.get_output(
+    #     f"aws ssm get-parameter --region us-east-1 --name azure_connection_string --with-decryption --output text --query Parameter.Value",
+    #     verbose=True,
+    #     strict=True
+    # )

+    ch_path = args.ch_path
+    assert Path(
+        ch_path + "/clickhouse"
+    ).is_file(), f"clickhouse binary not found under [{ch_path}]"

     stop_watch = Utils.Stopwatch()

     stages = list(JobStages)

+    logs_to_attach = []
+
     stage = args.param or JobStages.INSTALL_CLICKHOUSE
     if stage:
         assert stage in JobStages, f"--param must be one of [{list(JobStages)}]"
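The new --test-options flag folds the old positional parameter and batch spec into one comma-separated string. How the parsing behaves, traced by hand (values illustrative):

    test_options = "parallel,2/2".split(",")
    no_parallel = "non-parallel" in test_options   # False
    no_sequential = "parallel" in test_options     # True
    batch_num, total_batches = 0, 0
    for to in test_options:
        if "/" in to:
            batch_num, total_batches = map(int, to.split("/"))
    # -> run only parallel-safe tests, batch 2 of 2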
@@ -83,19 +89,22 @@ def main():
     res = True
     results = []

-    Utils.add_to_PATH(f"{Settings.INPUT_DIR}:tests")
+    Utils.add_to_PATH(f"{ch_path}:tests")

     if res and JobStages.INSTALL_CLICKHOUSE in stages:
         commands = [
-            f"chmod +x {Settings.INPUT_DIR}/clickhouse",
-            f"ln -sf {Settings.INPUT_DIR}/clickhouse {Settings.INPUT_DIR}/clickhouse-server",
-            f"ln -sf {Settings.INPUT_DIR}/clickhouse {Settings.INPUT_DIR}/clickhouse-client",
+            f"chmod +x {ch_path}/clickhouse",
+            f"ln -sf {ch_path}/clickhouse {ch_path}/clickhouse-server",
+            f"ln -sf {ch_path}/clickhouse {ch_path}/clickhouse-client",
             f"rm -rf {Settings.TEMP_DIR}/etc/ && mkdir -p {Settings.TEMP_DIR}/etc/clickhouse-client {Settings.TEMP_DIR}/etc/clickhouse-server",
             f"cp programs/server/config.xml programs/server/users.xml {Settings.TEMP_DIR}/etc/clickhouse-server/",
             f"./tests/config/install.sh {Settings.TEMP_DIR}/etc/clickhouse-server {Settings.TEMP_DIR}/etc/clickhouse-client --s3-storage",
             # update_path_ch_config,
-            f"sed -i 's|>/var/|>{Settings.TEMP_DIR}/var/|g; s|>/etc/|>{Settings.TEMP_DIR}/etc/|g' {Settings.TEMP_DIR}/etc/clickhouse-server/config.xml",
-            f"sed -i 's|>/etc/|>{Settings.TEMP_DIR}/etc/|g' {Settings.TEMP_DIR}/etc/clickhouse-server/config.d/ssl_certs.xml",
+            # f"sed -i 's|>/var/|>{Settings.TEMP_DIR}/var/|g; s|>/etc/|>{Settings.TEMP_DIR}/etc/|g' {Settings.TEMP_DIR}/etc/clickhouse-server/config.xml",
+            # f"sed -i 's|>/etc/|>{Settings.TEMP_DIR}/etc/|g' {Settings.TEMP_DIR}/etc/clickhouse-server/config.d/ssl_certs.xml",
+            f"for file in /tmp/praktika/etc/clickhouse-server/config.d/*.xml; do [ -f $file ] && echo Change config $file && sed -i 's|>/var/log|>{Settings.TEMP_DIR}/var/log|g; s|>/etc/|>{Settings.TEMP_DIR}/etc/|g' $(readlink -f $file); done",
+            f"for file in /tmp/praktika/etc/clickhouse-server/*.xml; do [ -f $file ] && echo Change config $file && sed -i 's|>/var/log|>{Settings.TEMP_DIR}/var/log|g; s|>/etc/|>{Settings.TEMP_DIR}/etc/|g' $(readlink -f $file); done",
+            f"for file in /tmp/praktika/etc/clickhouse-server/config.d/*.xml; do [ -f $file ] && echo Change config $file && sed -i 's|<path>local_disk|<path>{Settings.TEMP_DIR}/local_disk|g' $(readlink -f $file); done",
             f"clickhouse-server --version",
         ]
         results.append(
@@ -110,22 +119,27 @@ def main():
         stop_watch_ = Utils.Stopwatch()
         step_name = "Start ClickHouse Server"
         print(step_name)
-        res = res and CH.start_minio()
+        minio_log = "/tmp/praktika/output/minio.log"
+        res = res and CH.start_minio(log_file_path=minio_log)
+        logs_to_attach += [minio_log]
+        time.sleep(10)
+        Shell.check("ps -ef | grep minio", verbose=True)
+        res = res and Shell.check(
+            "aws s3 ls s3://test --endpoint-url http://localhost:11111/", verbose=True
+        )
         res = res and CH.start()
         res = res and CH.wait_ready()
         if res:
             print("ch started")
+        logs_to_attach += [
+            "/tmp/praktika/var/log/clickhouse-server/clickhouse-server.log",
+            "/tmp/praktika/var/log/clickhouse-server/clickhouse-server.err.log",
+        ]
         results.append(
             Result.create_from(
                 name=step_name,
                 status=res,
                 stopwatch=stop_watch_,
+                files=(
+                    [
+                        "/tmp/praktika/var/log/clickhouse-server/clickhouse-server.log",
+                        "/tmp/praktika/var/log/clickhouse-server/clickhouse-server.err.log",
+                    ]
+                    if not res
+                    else []
+                ),
             )
         )
         res = results[-1].is_ok()
@@ -144,7 +158,9 @@ def main():
         results[-1].set_timing(stopwatch=stop_watch_)
         res = results[-1].is_ok()

-    Result.create_from(results=results, stopwatch=stop_watch).complete_job()
+    Result.create_from(
+        results=results, stopwatch=stop_watch, files=logs_to_attach if not res else []
+    ).complete_job()


 if __name__ == "__main__":
----

@@ -1,5 +1,4 @@
-import threading
-import time
+import subprocess
 from pathlib import Path

 from praktika.settings import Settings
@@ -39,39 +38,25 @@ class ClickHouseProc:
         Utils.set_env("CLICKHOUSE_USER_FILES", self.user_files_path)
         Utils.set_env("CLICKHOUSE_SCHEMA_FILES", f"{self.ch_config_dir}/format_schemas")

-        if not fast_test:
-            with open(f"{self.ch_config_dir}/config.d/backups.xml", "w") as file:
-                file.write(self.BACKUPS_XML)
+        # if not fast_test:
+        #     with open(f"{self.ch_config_dir}/config.d/backups.xml", "w") as file:
+        #         file.write(self.BACKUPS_XML)

         self.minio_proc = None

-    def start_minio(self):
-        print("Starting minio")
-
-        def run_minio():
-            self.minio_proc = Shell.run_async(
-                self.minio_cmd, verbose=True, suppress_output=True
+    def start_minio(self, log_file_path):
+        command = ["tests/docker_scripts/setup_minio.sh", "stateless", "./tests"]
+        with open(log_file_path, "w") as log_file:
+            process = subprocess.Popen(
+                command, stdout=log_file, stderr=subprocess.STDOUT
             )
-
-        thread = threading.Thread(target=run_minio)
-        thread.daemon = True  # Allow program to exit even if thread is still running
-        thread.start()
-        time.sleep(5)
-        return thread.is_alive()
+        print(f"Started setup_minio.sh asynchronously with PID {process.pid}")
+        return True

     def start(self):
         print("Starting ClickHouse server")
         Shell.check(f"rm {self.pid_file}")

-        def run_clickhouse():
-            self.proc = Shell.run_async(
-                self.command, verbose=True, suppress_output=False
-            )
-
-        thread = threading.Thread(target=run_clickhouse)
-        thread.daemon = True  # Allow program to exit even if thread is still running
-        thread.start()
-
+        self.proc = subprocess.Popen(self.command, stderr=subprocess.STDOUT, shell=True)
         started = False
         try:
             for _ in range(5):
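Minio startup switches from a daemon thread wrapping Shell.run_async to launching tests/docker_scripts/setup_minio.sh directly with subprocess.Popen, capturing all output into a log file the caller can attach to the report on failure. The pattern in isolation (a sketch; paths are illustrative):

    import subprocess

    # launch a long-running helper in the background, capturing stdout+stderr
    with open("/tmp/minio.log", "w") as log_file:
        process = subprocess.Popen(
            ["tests/docker_scripts/setup_minio.sh", "stateless", "./tests"],
            stdout=log_file,
            stderr=subprocess.STDOUT,
        )
    print(process.pid)  # Popen returns immediately; the caller sleeps/polls for readiness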
----

@@ -30,7 +30,6 @@ class _Environment(MetaClasses.Serializable):
     INSTANCE_ID: str
     INSTANCE_LIFE_CYCLE: str
     LOCAL_RUN: bool = False
-    PARAMETER: Any = None
     REPORT_INFO: List[str] = dataclasses.field(default_factory=list)
     name = "environment"

@@ -172,18 +171,15 @@ class _Environment(MetaClasses.Serializable):

     # TODO: find a better place for the function. This file should not import praktika.settings
     # as it's requires reading users config, that's why imports nested inside the function
-    def get_report_url(self):
+    def get_report_url(self, settings):
         import urllib

-        from praktika.settings import Settings
-        from praktika.utils import Utils
-
-        path = Settings.HTML_S3_PATH
-        for bucket, endpoint in Settings.S3_BUCKET_TO_HTTP_ENDPOINT.items():
+        path = settings.HTML_S3_PATH
+        for bucket, endpoint in settings.S3_BUCKET_TO_HTTP_ENDPOINT.items():
             if bucket in path:
                 path = path.replace(bucket, endpoint)
                 break
-        REPORT_URL = f"https://{path}/{Path(Settings.HTML_PAGE_FILE).name}?PR={self.PR_NUMBER}&sha={self.SHA}&name_0={urllib.parse.quote(self.WORKFLOW_NAME, safe='')}&name_1={urllib.parse.quote(self.JOB_NAME, safe='')}"
+        REPORT_URL = f"https://{path}/{Path(settings.HTML_PAGE_FILE).name}?PR={self.PR_NUMBER}&sha={self.SHA}&name_0={urllib.parse.quote(self.WORKFLOW_NAME, safe='')}&name_1={urllib.parse.quote(self.JOB_NAME, safe='')}"
         return REPORT_URL

     def is_local_run(self):

----
@@ -52,7 +52,7 @@ class CIDB:
             check_status=result.status,
             check_duration_ms=int(result.duration * 1000),
             check_start_time=Utils.timestamp_to_str(result.start_time),
-            report_url=env.get_report_url(),
+            report_url=env.get_report_url(settings=Settings),
             pull_request_url=env.CHANGE_URL,
             base_ref=env.BASE_BRANCH,
             base_repo=env.REPOSITORY,
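get_report_url() now receives the settings object from its caller instead of importing praktika.settings itself; the TODO above it notes the module should not depend on user-config imports. Every call site changes the same way (sketch):

    # before: env.get_report_url()
    # after:  the caller supplies its own Settings object
    report_url = env.get_report_url(settings=Settings)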
----

@@ -31,6 +31,9 @@ class Digest:
         cache_key = self._hash_digest_config(config)

         if cache_key in self.digest_cache:
+            print(
+                f"calc digest for job [{job_config.name}]: hash_key [{cache_key}] - from cache"
+            )
             return self.digest_cache[cache_key]

         included_files = Utils.traverse_paths(
@@ -38,12 +41,9 @@ class Digest:
             job_config.digest_config.exclude_paths,
             sorted=True,
         )

-        # Sort files to ensure consistent hash calculation
-        included_files.sort()
-
-        # Calculate MD5 hash
-        res = ""
+        print(
+            f"calc digest for job [{job_config.name}]: hash_key [{cache_key}], include [{len(included_files)}] files"
+        )
@@ -52,11 +52,11 @@ class Digest:
             print(f"NOTE: empty digest config [{config}] - return dummy digest")
         else:
             hash_md5 = hashlib.md5()
-            for file_path in included_files:
-                res = self._calc_file_digest(file_path, hash_md5)
-            assert res
-        self.digest_cache[cache_key] = res
-        return res
+            for i, file_path in enumerate(included_files):
+                hash_md5 = self._calc_file_digest(file_path, hash_md5)
+        digest = hash_md5.hexdigest()[: Settings.CACHE_DIGEST_LEN]
+        self.digest_cache[cache_key] = digest
+        return digest

     def calc_docker_digest(
         self,
@@ -103,10 +103,10 @@ class Digest:
             print(
                 f"WARNING: No valid file resolved by link {file_path} -> {resolved_path} - skipping digest calculation"
             )
-            return hash_md5.hexdigest()[: Settings.CACHE_DIGEST_LEN]
+            return hash_md5

         with open(resolved_path, "rb") as f:
             for chunk in iter(lambda: f.read(4096), b""):
                 hash_md5.update(chunk)

-        return hash_md5.hexdigest()[: Settings.CACHE_DIGEST_LEN]
+        return hash_md5
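_calc_file_digest used to return a truncated hexdigest on every call; it now returns the hashlib object itself, so one MD5 accumulates over every included file and is rendered and truncated exactly once at the end. A minimal sketch of the new scheme (file names and digest length are illustrative):

    import hashlib

    def calc_file_digest(path, hash_md5):
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(4096), b""):
                hash_md5.update(chunk)
        return hash_md5  # the accumulator, not a per-file string

    hash_md5 = hashlib.md5()
    for path in ["a.txt", "b.txt"]:          # included_files
        hash_md5 = calc_file_digest(path, hash_md5)
    digest = hash_md5.hexdigest()[:12]       # truncated once, like CACHE_DIGEST_LEN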
----

@@ -1,3 +0,0 @@
-from praktika._environment import _Environment
-
-Environment = _Environment.get()
----

@@ -1,6 +1,5 @@
 import dataclasses
 import json
 import urllib.parse
 from pathlib import Path
 from typing import List

@@ -132,17 +131,9 @@ class HtmlRunnerHooks:
             result = Result.generate_skipped(job.name)
             results.append(result)
         summary_result = Result.generate_pending(_workflow.name, results=results)
-        summary_result.aux_links.append(env.CHANGE_URL)
-        summary_result.aux_links.append(env.RUN_URL)
+        summary_result.links.append(env.CHANGE_URL)
+        summary_result.links.append(env.RUN_URL)
         summary_result.start_time = Utils.timestamp()
-        page_url = "/".join(
-            ["https:/", Settings.HTML_S3_PATH, str(Path(Settings.HTML_PAGE_FILE).name)]
-        )
-        for bucket, endpoint in Settings.S3_BUCKET_TO_HTTP_ENDPOINT.items():
-            page_url = page_url.replace(bucket, endpoint)
-        # TODO: add support for non-PRs (use branch?)
-        page_url += f"?PR={env.PR_NUMBER}&sha=latest&name_0={urllib.parse.quote(env.WORKFLOW_NAME, safe='')}"
-        summary_result.html_link = page_url

         # clean the previous latest results in PR if any
         if env.PR_NUMBER:
@@ -152,13 +143,14 @@ class HtmlRunnerHooks:
                 unlock=False,
             )

+        page_url = env.get_report_url(settings=Settings)
         print(f"CI Status page url [{page_url}]")

         res1 = GH.post_commit_status(
             name=_workflow.name,
             status=Result.Status.PENDING,
             description="",
-            url=page_url,
+            url=env.get_report_url(settings=Settings),
         )
         res2 = GH.post_pr_comment(
             comment_body=f"Workflow [[{_workflow.name}]({page_url})], commit [{_Environment.get().SHA[:8]}]",
@@ -248,11 +240,11 @@ class HtmlRunnerHooks:
             )
             if workflow_result.status != old_status:
                 print(
-                    f"Update GH commit status [{result.name}]: [{old_status} -> {workflow_result.status}], link [{workflow_result.html_link}]"
+                    f"Update GH commit status [{result.name}]: [{old_status} -> {workflow_result.status}]"
                 )
                 GH.post_commit_status(
                     name=workflow_result.name,
                     status=GH.convert_to_gh_status(workflow_result.status),
                     description="",
-                    url=workflow_result.html_link,
+                    url=env.get_report_url(settings=Settings),
                 )
----

@@ -89,6 +89,7 @@ class Job:
             ), "Job.Config.provides must be empty for parametrized jobs"
             if parameter_:
                 obj.parameter = parameter_
+                obj.command = obj.command.format(PARAMETER=parameter_)
             if runs_on_:
                 obj.runs_on = runs_on_
             if timeout_:
----

@@ -663,20 +663,20 @@
         let targetData = navigatePath(data, nameParams);
         let nest_level = nameParams.length;

+        // Add footer links from top-level Result
+        if (Array.isArray(data.links) && data.links.length > 0) {
+            data.links.forEach(link => {
+                const a = document.createElement('a');
+                a.href = link;
+                a.textContent = link.split('/').pop();
+                a.target = '_blank';
+                footerRight.appendChild(a);
+            });
+        }
+
         if (targetData) {
             infoElement.style.display = 'none';

-            // Handle footer links if present
-            if (Array.isArray(data.aux_links) && data.aux_links.length > 0) {
-                data.aux_links.forEach(link => {
-                    const a = document.createElement('a');
-                    a.href = link;
-                    a.textContent = link.split('/').pop();
-                    a.target = '_blank';
-                    footerRight.appendChild(a);
-                });
-            }
-
             addStatusToStatus(targetData.status, targetData.start_time, targetData.duration)

             // Handle links
----

@@ -14,35 +14,34 @@ def _get_workflows(name=None, file=None):
     """
     res = []

-    with ContextManager.cd():
-        directory = Path(_Settings.WORKFLOWS_DIRECTORY)
-        for py_file in directory.glob("*.py"):
-            if file and file not in str(py_file):
-                continue
-            module_name = py_file.name.removeprefix(".py")
-            spec = importlib.util.spec_from_file_location(
-                module_name, f"{_Settings.WORKFLOWS_DIRECTORY}/{module_name}"
-            )
-            assert spec
-            foo = importlib.util.module_from_spec(spec)
-            assert spec.loader
-            spec.loader.exec_module(foo)
-            try:
-                for workflow in foo.WORKFLOWS:
-                    if name:
-                        if name == workflow.name:
-                            print(f"Read workflow [{name}] config from [{module_name}]")
-                            res = [workflow]
-                            break
-                        else:
-                            continue
-                    else:
-                        res += foo.WORKFLOWS
-                        print(f"Read workflow configs from [{module_name}]")
-            except Exception as e:
-                print(
-                    f"WARNING: Failed to add WORKFLOWS config from [{module_name}], exception [{e}]"
-                )
+    directory = Path(_Settings.WORKFLOWS_DIRECTORY)
+    for py_file in directory.glob("*.py"):
+        if file and file not in str(py_file):
+            continue
+        module_name = py_file.name.removeprefix(".py")
+        spec = importlib.util.spec_from_file_location(
+            module_name, f"{_Settings.WORKFLOWS_DIRECTORY}/{module_name}"
+        )
+        assert spec
+        foo = importlib.util.module_from_spec(spec)
+        assert spec.loader
+        spec.loader.exec_module(foo)
+        try:
+            for workflow in foo.WORKFLOWS:
+                if name:
+                    if name == workflow.name:
+                        print(f"Read workflow [{name}] config from [{module_name}]")
+                        res = [workflow]
+                        break
+                else:
+                    res += foo.WORKFLOWS
+                    print(f"Read workflow configs from [{module_name}]")
+        except Exception as e:
+            print(
+                f"WARNING: Failed to add WORKFLOWS config from [{module_name}], exception [{e}]"
+            )
+            continue
     if not res:
         Utils.raise_with_error(f"Failed to find workflow [{name or file}]")
----

@@ -342,7 +342,7 @@ def _finish_workflow(workflow, job_name):
                 f"NOTE: Result for [{result.name}] has not ok status [{result.status}]"
             )
             ready_for_merge_status = Result.Status.FAILED
-            failed_results.append(result.name.split("(", maxsplit=1)[0])  # cut name
+            failed_results.append(result.name)

     if failed_results:
         ready_for_merge_description = f"failed: {', '.join(failed_results)}"
@@ -362,9 +362,7 @@ def _finish_workflow(workflow, job_name):
         unlock=False,
     )  # no lock - no unlock

-    Result.from_fs(job_name).set_status(Result.Status.SUCCESS).set_info(
-        ready_for_merge_description
-    )
+    Result.from_fs(job_name).set_status(Result.Status.SUCCESS)


 if __name__ == "__main__":
----

@@ -1,8 +0,0 @@
-from praktika._environment import _Environment
-
-
-# TODO: find better place and/or right storage for parameter
-def get_param():
-    env = _Environment.get()
-    assert env.PARAMETER
-    return env.PARAMETER
----

@@ -26,10 +26,6 @@ class Result(MetaClasses.Serializable):
         files (List[str]): A list of file paths or names related to the result.
         links (List[str]): A list of URLs related to the result (e.g., links to reports or resources).
         info (str): Additional information about the result. Free-form text.
-        # TODO: rename
-        aux_links (List[str]): A list of auxiliary links that provide additional context for the result.
-        # TODO: remove
-        html_link (str): A direct link to an HTML representation of the result (e.g., a detailed report page).

     Inner Class:
         Status: Defines possible statuses for the task, such as "success", "failure", etc.
@@ -51,8 +47,6 @@ class Result(MetaClasses.Serializable):
     files: List[str] = dataclasses.field(default_factory=list)
     links: List[str] = dataclasses.field(default_factory=list)
     info: str = ""
-    aux_links: List[str] = dataclasses.field(default_factory=list)
-    html_link: str = ""

     @staticmethod
     def create_from(
----

@@ -80,7 +80,6 @@ class Runner:
             print("Read GH Environment")
             env = _Environment.from_env()
             env.JOB_NAME = job.name
-            env.PARAMETER = job.parameter
             env.dump()
             print(env)

@@ -128,7 +127,6 @@ class Runner:
             # re-set envs for local run
             env = _Environment.get()
             env.JOB_NAME = job.name
-            env.PARAMETER = job.parameter
             env.dump()

         if param:
@@ -143,6 +141,7 @@ class Runner:
                 job.run_in_docker.split("+")[1:],
             )
             from_root = "root" in docker_settings
+            settings = [s for s in docker_settings if s.startswith("--")]
             if ":" in job.run_in_docker:
                 docker_name, docker_tag = job.run_in_docker.split(":")
                 print(
@@ -154,9 +153,11 @@ class Runner:
                     RunConfig.from_fs(workflow.name).digest_dockers[job.run_in_docker],
                 )
                 docker = docker or f"{docker_name}:{docker_tag}"
-            cmd = f"docker run --rm --name praktika {'--user $(id -u):$(id -g)' if not from_root else ''} -e PYTHONPATH='{Settings.DOCKER_WD}:{Settings.DOCKER_WD}/ci' --volume ./:{Settings.DOCKER_WD} --volume {Settings.TEMP_DIR}:{Settings.TEMP_DIR} --workdir={Settings.DOCKER_WD} {docker} {job.command}"
+            cmd = f"docker run --rm --name praktika {'--user $(id -u):$(id -g)' if not from_root else ''} -e PYTHONPATH='{Settings.DOCKER_WD}:{Settings.DOCKER_WD}/ci' --volume ./:{Settings.DOCKER_WD} --volume {Settings.TEMP_DIR}:{Settings.TEMP_DIR} --workdir={Settings.DOCKER_WD} {' '.join(settings)} {docker} {job.command}"
         else:
             cmd = job.command
+            python_path = os.getenv("PYTHONPATH", ":")
+            os.environ["PYTHONPATH"] = f".:{python_path}"

         if param:
             print(f"Custom --param [{param}] will be passed to job's script")
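run_in_docker can now carry extra `docker run` flags after `+` separators; anything starting with `--` is appended to the command line. Tracing it with the image string this commit adds for the stateless-test jobs:

    run_in_docker = "clickhouse/stateless-test+--security-opt seccomp=unconfined+--volume=/tmp/praktika:/var/lib/clickhouse"
    image, *docker_settings = run_in_docker.split("+")
    settings = [s for s in docker_settings if s.startswith("--")]
    print(image)     # clickhouse/stateless-test
    print(settings)  # ['--security-opt seccomp=unconfined', '--volume=/tmp/praktika:/var/lib/clickhouse']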
----

@@ -81,25 +81,26 @@ class MetaClasses:
 class ContextManager:
     @staticmethod
     @contextmanager
-    def cd(to: Optional[Union[Path, str]] = None) -> Iterator[None]:
+    def cd(to: Optional[Union[Path, str]]) -> Iterator[None]:
         """
         changes current working directory to @path or `git root` if @path is None
         :param to:
         :return:
         """
-        if not to:
-            try:
-                to = Shell.get_output_or_raise("git rev-parse --show-toplevel")
-            except:
-                pass
-        if not to:
-            if Path(_Settings.DOCKER_WD).is_dir():
-                to = _Settings.DOCKER_WD
-        if not to:
-            assert False, "FIX IT"
-        assert to
+        # if not to:
+        #     try:
+        #         to = Shell.get_output_or_raise("git rev-parse --show-toplevel")
+        #     except:
+        #         pass
+        #     if not to:
+        #         if Path(_Settings.DOCKER_WD).is_dir():
+        #             to = _Settings.DOCKER_WD
+        #     if not to:
+        #         assert False, "FIX IT"
+        #     assert to
         old_pwd = os.getcwd()
-        os.chdir(to)
+        if to:
+            os.chdir(to)
         try:
             yield
         finally:
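cd() loses its "default to git root" behavior: the argument is now required, and None simply means "stay put". Callers that relied on the implicit repo-root chdir (the mangle, validator, and yaml-generator hunks in this commit are all dedented accordingly) now run from the current working directory. The new contract in short:

    # before: with ContextManager.cd():      -> chdir to `git rev-parse --show-toplevel`
    # after:  with ContextManager.cd(None):  -> no chdir at all
    with ContextManager.cd(None):
        pass  # cwd unchanged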
----

@@ -119,61 +119,58 @@ class Validator:
     def validate_file_paths_in_run_command(cls, workflow: Workflow.Config) -> None:
         if not Settings.VALIDATE_FILE_PATHS:
             return
-        with ContextManager.cd():
-            for job in workflow.jobs:
-                run_command = job.command
-                command_parts = run_command.split(" ")
-                for part in command_parts:
-                    if ">" in part:
-                        return
-                    if "/" in part:
-                        assert (
-                            Path(part).is_file() or Path(part).is_dir()
-                        ), f"Apparently run command [{run_command}] for job [{job}] has invalid path [{part}]. Setting to disable check: VALIDATE_FILE_PATHS"
+        for job in workflow.jobs:
+            run_command = job.command
+            command_parts = run_command.split(" ")
+            for part in command_parts:
+                if ">" in part:
+                    return
+                if "/" in part:
+                    assert (
+                        Path(part).is_file() or Path(part).is_dir()
+                    ), f"Apparently run command [{run_command}] for job [{job}] has invalid path [{part}]. Setting to disable check: VALIDATE_FILE_PATHS"

     @classmethod
     def validate_file_paths_in_digest_configs(cls, workflow: Workflow.Config) -> None:
         if not Settings.VALIDATE_FILE_PATHS:
             return
-        with ContextManager.cd():
-            for job in workflow.jobs:
-                if not job.digest_config:
-                    continue
-                for include_path in chain(
-                    job.digest_config.include_paths, job.digest_config.exclude_paths
-                ):
-                    if "*" in include_path:
-                        assert glob.glob(
-                            include_path, recursive=True
-                        ), f"Apparently file glob [{include_path}] in job [{job.name}] digest_config [{job.digest_config}] invalid, workflow [{workflow.name}]. Setting to disable check: VALIDATE_FILE_PATHS"
-                    else:
-                        assert (
-                            Path(include_path).is_file() or Path(include_path).is_dir()
-                        ), f"Apparently file path [{include_path}] in job [{job.name}] digest_config [{job.digest_config}] invalid, workflow [{workflow.name}]. Setting to disable check: VALIDATE_FILE_PATHS"
+        for job in workflow.jobs:
+            if not job.digest_config:
+                continue
+            for include_path in chain(
+                job.digest_config.include_paths, job.digest_config.exclude_paths
+            ):
+                if "*" in include_path:
+                    assert glob.glob(
+                        include_path, recursive=True
+                    ), f"Apparently file glob [{include_path}] in job [{job.name}] digest_config [{job.digest_config}] invalid, workflow [{workflow.name}]. Setting to disable check: VALIDATE_FILE_PATHS"
+                else:
+                    assert (
+                        Path(include_path).is_file() or Path(include_path).is_dir()
+                    ), f"Apparently file path [{include_path}] in job [{job.name}] digest_config [{job.digest_config}] invalid, workflow [{workflow.name}]. Setting to disable check: VALIDATE_FILE_PATHS"

     @classmethod
     def validate_requirements_txt_files(cls, workflow: Workflow.Config) -> None:
-        with ContextManager.cd():
-            for job in workflow.jobs:
-                if job.job_requirements:
-                    if job.job_requirements.python_requirements_txt:
-                        path = Path(job.job_requirements.python_requirements_txt)
-                        message = f"File with py requirement [{path}] does not exist"
-                        if job.name in (
-                            Settings.DOCKER_BUILD_JOB_NAME,
-                            Settings.CI_CONFIG_JOB_NAME,
-                            Settings.FINISH_WORKFLOW_JOB_NAME,
-                        ):
-                            message += '\n  If all requirements already installed on your runners - add setting INSTALL_PYTHON_REQS_FOR_NATIVE_JOBS""'
-                            message += "\n  If requirements needs to be installed - add requirements file (Settings.INSTALL_PYTHON_REQS_FOR_NATIVE_JOBS):"
-                            message += "\n      echo jwt==1.3.1 > ./ci/requirements.txt"
-                            message += (
-                                "\n      echo requests==2.32.3 >> ./ci/requirements.txt"
-                            )
-                            message += "\n      echo https://clickhouse-builds.s3.amazonaws.com/packages/praktika-0.1-py3-none-any.whl >> ./ci/requirements.txt"
-                        cls.evaluate_check(
-                            path.is_file(), message, job.name, workflow.name
+        for job in workflow.jobs:
+            if job.job_requirements:
+                if job.job_requirements.python_requirements_txt:
+                    path = Path(job.job_requirements.python_requirements_txt)
+                    message = f"File with py requirement [{path}] does not exist"
+                    if job.name in (
+                        Settings.DOCKER_BUILD_JOB_NAME,
+                        Settings.CI_CONFIG_JOB_NAME,
+                        Settings.FINISH_WORKFLOW_JOB_NAME,
+                    ):
+                        message += '\n  If all requirements already installed on your runners - add setting INSTALL_PYTHON_REQS_FOR_NATIVE_JOBS""'
+                        message += "\n  If requirements needs to be installed - add requirements file (Settings.INSTALL_PYTHON_REQS_FOR_NATIVE_JOBS):"
+                        message += "\n      echo jwt==1.3.1 > ./ci/requirements.txt"
+                        message += (
+                            "\n      echo requests==2.32.3 >> ./ci/requirements.txt"
+                        )
+                        message += "\n      echo https://clickhouse-builds.s3.amazonaws.com/packages/praktika-0.1-py3-none-any.whl >> ./ci/requirements.txt"
+                    cls.evaluate_check(
+                        path.is_file(), message, job.name, workflow.name
                     )

     @classmethod
     def validate_dockers(cls, workflow: Workflow.Config):
----

@@ -81,8 +81,7 @@ jobs:
       - name: Checkout code
         uses: actions/checkout@v4
         with:
-          clear-repository: true
-          ref: ${{{{ github.event.pull_reguest.head.sha }}}}
+          ref: ${{{{ github.head_ref }}}}
 {JOB_ADDONS}
       - name: Prepare env script
         run: |
@@ -191,12 +190,10 @@ jobs:
                 False
             ), f"Workflow event not yet supported [{workflow_config.event}]"

-        with ContextManager.cd():
-            with open(self._get_workflow_file_name(workflow_config.name), "w") as f:
-                f.write(yaml_workflow_str)
+        with open(self._get_workflow_file_name(workflow_config.name), "w") as f:
+            f.write(yaml_workflow_str)

-        with ContextManager.cd():
-            Shell.check("git add ./.github/workflows/*.yaml")
+        Shell.check("git add ./.github/workflows/*.yaml")


 class PullRequestPushYamlGen:
----

@@ -8,7 +8,7 @@ class RunnerLabels:
     CI_SERVICES = "ci_services"
     CI_SERVICES_EBS = "ci_services_ebs"
     BUILDER = "builder"
-    STYLE_CHECKER = "style-checker"
+    FUNC_TESTER_AMD = "func-tester"


 BASE_BRANCH = "master"
@@ -238,5 +238,4 @@ class JobNames:
     STYLE_CHECK = "Style Check"
     FAST_TEST = "Fast test"
     BUILD = "Build"
-    BUILD_AMD_DEBUG = "Build (amd, debug)"
-    STATELESS_TESTS = "Stateless tests (amd, debug)"
+    STATELESS = "Stateless tests"
----

@@ -41,8 +41,9 @@ fast_test_job = Job.Config(
 amd_build_jobs = Job.Config(
     name=JobNames.BUILD,
     runs_on=[RunnerLabels.BUILDER],
-    command="python3 ./ci/jobs/build_clickhouse.py",
+    command="python3 ./ci/jobs/build_clickhouse.py --build-type {PARAMETER}",
     run_in_docker="clickhouse/fasttest",
+    timeout=3600 * 2,
     digest_config=Job.CacheDigestConfig(
         include_paths=[
             "./src",
@@ -55,6 +56,7 @@ amd_build_jobs = Job.Config(
             "./docker/packager/packager",
             "./rust",
             "./tests/ci/version_helper.py",
+            "./ci/jobs/build_clickhouse.py",
         ],
     ),
 ).parametrize(
@@ -62,27 +64,53 @@ amd_build_jobs = Job.Config(
     provides=[[ArtifactNames.CH_AMD_DEBUG], [ArtifactNames.CH_AMD_RELEASE]],
 )

-statless_batch_num = 2
-stateless_tests_amd_debug_jobs = Job.Config(
-    name=JobNames.STATELESS_TESTS,
+stateless_tests_jobs = Job.Config(
+    name=JobNames.STATELESS,
     runs_on=[RunnerLabels.BUILDER],
-    command="python3 ./ci/jobs/functional_stateless_tests.py amd_debug",
-    run_in_docker="clickhouse/stateless-test",
+    command="python3 ./ci/jobs/functional_stateless_tests.py --test-options {PARAMETER}",
+    run_in_docker="clickhouse/stateless-test+--security-opt seccomp=unconfined+--volume=/tmp/praktika:/var/lib/clickhouse",
     digest_config=Job.CacheDigestConfig(
         include_paths=[
             "./ci/jobs/functional_stateless_tests.py",
         ],
     ),
-    requires=[ArtifactNames.CH_AMD_DEBUG],
 ).parametrize(
     parameter=[
-        f"parallel {i+1}/{statless_batch_num}" for i in range(statless_batch_num)
-    ]
-    + ["non-parallel"],
-    runs_on=[[RunnerLabels.BUILDER] for _ in range(statless_batch_num)]
-    + [[RunnerLabels.STYLE_CHECKER]],
+        "amd_debug,parallel",
+        "amd_debug,non-parallel",
+        "amd_release,parallel",
+        "amd_release,non-parallel",
+    ],
+    runs_on=[
+        [RunnerLabels.BUILDER],
+        [RunnerLabels.FUNC_TESTER_AMD],
+        [RunnerLabels.BUILDER],
+        [RunnerLabels.FUNC_TESTER_AMD],
+    ],
+    requires=[
+        [ArtifactNames.CH_AMD_DEBUG],
+        [ArtifactNames.CH_AMD_DEBUG],
+        [ArtifactNames.CH_AMD_RELEASE],
+        [ArtifactNames.CH_AMD_RELEASE],
+    ],
 )

+# stateless_tests_amd_release_jobs = Job.Config(
+#     name=JobNames.STATELESS_AMD_RELEASE,
+#     runs_on=[RunnerLabels.BUILDER],
+#     command="python3 ./ci/jobs/functional_stateless_tests.py --test-options {PARAMETER}",
+#     run_in_docker="clickhouse/stateless-test+--security-opt seccomp=unconfined+--volume=/tmp/praktika:/var/lib/clickhouse",
+#     digest_config=Job.CacheDigestConfig(
+#         include_paths=[
+#             "./ci/jobs/functional_stateless_tests.py",
+#         ],
+#     ),
+#     requires=[ArtifactNames.CH_AMD_RELEASE],
+# ).parametrize(
+#     parameter=["parallel", "non-parallel"],
+#     runs_on=[[RunnerLabels.BUILDER], [RunnerLabels.FUNC_TESTER_AMD]],
+# )

 workflow = Workflow.Config(
     name="PR",
     event=Workflow.Event.PULL_REQUEST,
@@ -91,7 +119,7 @@ workflow = Workflow.Config(
         style_check_job,
         fast_test_job,
         *amd_build_jobs,
-        *stateless_tests_amd_debug_jobs,
+        *stateless_tests_jobs,
     ],
     artifacts=[
         Artifact.Config(
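One parametrized Job.Config now fans out into all four stateless-test jobs; parameter, runs_on, and requires are per-instance lists that line up index by index. Roughly what parametrize expands to here (a sketch using the values from this file):

    for parameter, runs_on, requires in zip(
        ["amd_debug,parallel", "amd_debug,non-parallel",
         "amd_release,parallel", "amd_release,non-parallel"],
        [["builder"], ["func-tester"], ["builder"], ["func-tester"]],
        [["CH_AMD_DEBUG"], ["CH_AMD_DEBUG"], ["CH_AMD_RELEASE"], ["CH_AMD_RELEASE"]],
    ):
        # each instance becomes e.g. "Stateless tests (amd_debug,parallel)",
        # with {PARAMETER} substituted into the job command
        print(f"Stateless tests ({parameter})", runs_on, requires)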
----

@@ -2619,14 +2619,14 @@ def run_tests_process(*args, **kwargs):


 def do_run_tests(jobs, test_suite: TestSuite):
-    if jobs > 1 and len(test_suite.parallel_tests) > 0:
-        print(
-            "Found",
-            len(test_suite.parallel_tests),
-            "parallel tests and",
-            len(test_suite.sequential_tests),
-            "sequential tests",
-        )
+    print(
+        "Found",
+        len(test_suite.parallel_tests),
+        "parallel tests and",
+        len(test_suite.sequential_tests),
+        "sequential tests",
+    )
+    if test_suite.parallel_tests:
         tests_n = len(test_suite.parallel_tests)
         jobs = min(jobs, tests_n)

@@ -2639,6 +2639,7 @@ def do_run_tests(jobs, test_suite: TestSuite):
         # It makes it more difficult to detect real flaky tests,
         # because the distribution and the amount
         # of failures will be nearly the same for all tests from the group.
+        # TODO: add shuffle for sequential tests
         random.shuffle(test_suite.parallel_tests)

         batch_size = len(test_suite.parallel_tests) // jobs
@@ -2684,6 +2685,7 @@ def do_run_tests(jobs, test_suite: TestSuite):
             if not p.is_alive():
                 processes.remove(p)

+    if test_suite.sequential_tests:
         run_tests_array(
             (
                 test_suite.sequential_tests,
@@ -2693,17 +2695,7 @@ def do_run_tests(jobs, test_suite: TestSuite):
             )
         )

-        return len(test_suite.sequential_tests) + len(test_suite.parallel_tests)
-    num_tests = len(test_suite.all_tests)
-    run_tests_array(
-        (
-            test_suite.all_tests,
-            num_tests,
-            test_suite,
-            False,
-        )
-    )
-    return num_tests
+    return len(test_suite.sequential_tests) + len(test_suite.parallel_tests)


 def is_test_from_dir(suite_dir, case):
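do_run_tests previously had two exits: a parallel branch returning seq+par counts, and a fallback that pushed all_tests through a single run_tests_array call. The branches are now unified: parallel tests run (and are shuffled) only if present, sequential tests run only if present, and there is one return. A sketch of the resulting control flow:

    def do_run_tests(jobs, test_suite):
        if test_suite.parallel_tests:
            ...  # shuffle, batch by len(parallel_tests) // jobs, spawn worker processes
        if test_suite.sequential_tests:
            ...  # run in-process via run_tests_array
        return len(test_suite.sequential_tests) + len(test_suite.parallel_tests)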
----

@@ -200,7 +200,7 @@ elif [[ "$USE_AZURE_STORAGE_FOR_MERGE_TREE" == "1" ]]; then
 fi

 if [[ "$EXPORT_S3_STORAGE_POLICIES" == "1" ]] || [[ "$S3_STORAGE" = "1" ]]; then
-    ln -sf $SRC_PATH/config.d/azure_storage_conf.xml $DEST_SERVER_PATH/config.d/
+    #ln -sf $SRC_PATH/config.d/azure_storage_conf.xml $DEST_SERVER_PATH/config.d/
     ln -sf $SRC_PATH/config.d/storage_conf.xml $DEST_SERVER_PATH/config.d/
     ln -sf $SRC_PATH/config.d/storage_conf_02944.xml $DEST_SERVER_PATH/config.d/
     ln -sf $SRC_PATH/config.d/storage_conf_02963.xml $DEST_SERVER_PATH/config.d/
----

@@ -4,8 +4,10 @@ set -euxf -o pipefail

 export MINIO_ROOT_USER=${MINIO_ROOT_USER:-clickhouse}
 export MINIO_ROOT_PASSWORD=${MINIO_ROOT_PASSWORD:-clickhouse}
+TEST_DIR=${2:-/repo/tests/}

 if [ -d "$TEMP_DIR" ]; then
+    TEST_DIR=$(readlink -f $TEST_DIR)
     cd "$TEMP_DIR"
     # add / for minio mc in docker
     PATH="/:.:$PATH"
@@ -79,7 +81,7 @@ start_minio() {
     pwd
     mkdir -p ./minio_data
     minio --version
-    minio server --address ":11111" ./minio_data &
+    nohup minio server --address ":11111" ./minio_data &
     wait_for_it
     lsof -i :11111
     sleep 5
@@ -153,7 +155,7 @@ main() {
     fi
     start_minio
     setup_minio "$1"
-    upload_data "${query_dir}" "${2:-/repo/tests/}"
+    upload_data "${query_dir}" "$TEST_DIR"
     setup_aws_credentials
 }
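The test directory is now resolved once at the top of the script (second CLI argument, defaulting to /repo/tests/, made absolute with readlink -f when running inside TEMP_DIR) instead of being expanded at the upload_data call site. This matches how the script is invoked from ClickHouseProc.start_minio above:

    # invocation used by ClickHouseProc.start_minio:
    command = ["tests/docker_scripts/setup_minio.sh", "stateless", "./tests"]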