From aeffae571c91909a7196bec6e952026932c43cc6 Mon Sep 17 00:00:00 2001 From: Max Kainov Date: Sat, 26 Oct 2024 19:35:33 +0200 Subject: [PATCH] CI: Functional Tests with praktika --- .github/workflows/pr.yaml | 287 ++++++++++++++++++++ .github/workflows/pull_request.yml | 212 --------------- ci/jobs/build_clickhouse.py | 12 +- ci/jobs/check_style.py | 2 +- ci/jobs/fast_test.py | 2 +- ci/jobs/functional_stateless_tests.py | 48 ++++ ci/jobs/scripts/functional_tests_results.py | 3 +- ci/praktika/__main__.py | 23 +- ci/praktika/_environment.py | 3 +- ci/praktika/_settings.py | 3 +- ci/praktika/hook_cache.py | 6 +- ci/praktika/json.html | 50 +++- ci/praktika/result.py | 2 +- ci/praktika/runner.py | 56 ++-- ci/praktika/yaml_generator.py | 6 +- ci/settings/definitions.py | 1 + ci/settings/settings.py | 2 + ci/workflows/pull_request.py | 16 +- 18 files changed, 477 insertions(+), 257 deletions(-) create mode 100644 .github/workflows/pr.yaml delete mode 100644 .github/workflows/pull_request.yml create mode 100644 ci/jobs/functional_stateless_tests.py diff --git a/.github/workflows/pr.yaml b/.github/workflows/pr.yaml new file mode 100644 index 00000000000..34c794f6088 --- /dev/null +++ b/.github/workflows/pr.yaml @@ -0,0 +1,287 @@ +# generated by praktika + +name: PR + +on: + pull_request: + branches: ['master'] + +# Cancel the previous wf run in PRs. +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +env: + # Force the stdout and stderr streams to be unbuffered + PYTHONUNBUFFERED: 1 + GH_TOKEN: ${{ github.token }} + +# Allow updating GH commit statuses and PR comments to post an actual job reports link +permissions: write-all + +jobs: + + config_workflow: + runs-on: [ci_services] + needs: [] + name: "Config Workflow" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Prepare env script + run: | + cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:. + + cat > /tmp/praktika/workflow_config_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > /tmp/praktika/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika + mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output + + - name: Run + id: run + run: | + . /tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run --job '''Config Workflow''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log + else + python3 -m praktika run --job '''Config Workflow''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log + fi + + docker_builds: + runs-on: [ci_services_ebs] + needs: [config_workflow] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'RG9ja2VyIEJ1aWxkcw==') }} + name: "Docker Builds" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Prepare env script + run: | + cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:. 
+ + cat > /tmp/praktika/workflow_config_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > /tmp/praktika/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika + mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output + + - name: Run + id: run + run: | + . /tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run --job '''Docker Builds''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log + else + python3 -m praktika run --job '''Docker Builds''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log + fi + + style_check: + runs-on: [ci_services] + needs: [config_workflow, docker_builds] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3R5bGUgQ2hlY2s=') }} + name: "Style Check" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Prepare env script + run: | + cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:. + + cat > /tmp/praktika/workflow_config_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > /tmp/praktika/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika + mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output + + - name: Run + id: run + run: | + . /tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run --job '''Style Check''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log + else + python3 -m praktika run --job '''Style Check''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log + fi + + fast_test: + runs-on: [builder] + needs: [config_workflow, docker_builds] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'RmFzdCB0ZXN0') }} + name: "Fast test" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Prepare env script + run: | + cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:. + + cat > /tmp/praktika/workflow_config_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > /tmp/praktika/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika + mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output + + - name: Run + id: run + run: | + . 
/tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run --job '''Fast test''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log + else + python3 -m praktika run --job '''Fast test''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log + fi + + build_amd64_debug: + runs-on: [builder] + needs: [config_workflow, docker_builds] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'QnVpbGQgYW1kNjQgZGVidWc=') }} + name: "Build amd64 debug" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Prepare env script + run: | + cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:. + + cat > /tmp/praktika/workflow_config_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > /tmp/praktika/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika + mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output + + - name: Run + id: run + run: | + . /tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run --job '''Build amd64 debug''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log + else + python3 -m praktika run --job '''Build amd64 debug''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log + fi + + stateless_tests_amd_debug: + runs-on: [builder] + needs: [config_workflow, docker_builds, build_amd64_debug] + if: ${{ !failure() && !cancelled() && !contains(fromJson(needs.config_workflow.outputs.data).cache_success_base64, 'U3RhdGVsZXNzIHRlc3RzIChhbWQsIGRlYnVnKQ==') }} + name: "Stateless tests (amd, debug)" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Prepare env script + run: | + cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:. + + cat > /tmp/praktika/workflow_config_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > /tmp/praktika/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika + mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output + + - name: Run + id: run + run: | + . /tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run --job '''Stateless tests (amd, debug)''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log + else + python3 -m praktika run --job '''Stateless tests (amd, debug)''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log + fi + + finish_workflow: + runs-on: [ci_services] + needs: [config_workflow, docker_builds, style_check, fast_test, build_amd64_debug, stateless_tests_amd_debug] + if: ${{ !cancelled() }} + name: "Finish Workflow" + outputs: + data: ${{ steps.run.outputs.DATA }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Prepare env script + run: | + cat > /tmp/praktika_setup_env.sh << 'ENV_SETUP_SCRIPT_EOF' + export PYTHONPATH=./ci:. 
+ + cat > /tmp/praktika/workflow_config_pr.json << 'EOF' + ${{ needs.config_workflow.outputs.data }} + EOF + cat > /tmp/praktika/workflow_status.json << 'EOF' + ${{ toJson(needs) }} + EOF + ENV_SETUP_SCRIPT_EOF + + rm -rf /tmp/praktika/input /tmp/praktika/output /tmp/praktika + mkdir -p /tmp/praktika /tmp/praktika/input /tmp/praktika/output + + - name: Run + id: run + run: | + . /tmp/praktika_setup_env.sh + set -o pipefail + if command -v ts &> /dev/null; then + python3 -m praktika run --job '''Finish Workflow''' --workflow "PR" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log + else + python3 -m praktika run --job '''Finish Workflow''' --workflow "PR" --ci |& tee /tmp/praktika/praktika_run.log + fi diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml deleted file mode 100644 index e4eb44b2774..00000000000 --- a/.github/workflows/pull_request.yml +++ /dev/null @@ -1,212 +0,0 @@ -# yamllint disable rule:comments-indentation -name: PullRequestCI - -env: - # Force the stdout and stderr streams to be unbuffered - PYTHONUNBUFFERED: 1 - -on: # yamllint disable-line rule:truthy - pull_request: - types: - - synchronize - - reopened - - opened - branches: - - master - -# Cancel the previous wf run in PRs. -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -jobs: - RunConfig: - runs-on: [self-hosted, style-checker-aarch64] - outputs: - data: ${{ steps.runconfig.outputs.CI_DATA }} - steps: - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true # to ensure correct digests - fetch-depth: 0 # to get a version - filter: tree:0 - - name: Debug Info - uses: ./.github/actions/debug - - name: Set pending Sync status - run: | - python3 "$GITHUB_WORKSPACE/tests/ci/ci.py" --set-pending-status - - name: Labels check - run: | - cd "$GITHUB_WORKSPACE/tests/ci" - python3 run_check.py - - name: Python unit tests - run: | - cd "$GITHUB_WORKSPACE/tests/ci" - echo "Testing the main ci directory" - python3 -m unittest discover -s . 
-p 'test_*.py' - - name: PrepareRunConfig - id: runconfig - run: | - python3 "$GITHUB_WORKSPACE/tests/ci/ci.py" --configure --outfile ${{ runner.temp }}/ci_run_data.json - - echo "::group::CI configuration" - python3 -m json.tool ${{ runner.temp }}/ci_run_data.json - echo "::endgroup::" - - { - echo 'CI_DATA<> "$GITHUB_OUTPUT" - - name: Re-create GH statuses for skipped jobs if any - run: | - python3 "$GITHUB_WORKSPACE/tests/ci/ci.py" --infile ${{ runner.temp }}/ci_run_data.json --update-gh-statuses - BuildDockers: - needs: [RunConfig] - if: ${{ !failure() && !cancelled() && toJson(fromJson(needs.RunConfig.outputs.data).docker_data.missing_multi) != '[]' }} - uses: ./.github/workflows/docker_test_images.yml - with: - data: ${{ needs.RunConfig.outputs.data }} - StyleCheck: - needs: [RunConfig, BuildDockers] - if: ${{ !failure() && !cancelled() && contains(fromJson(needs.RunConfig.outputs.data).jobs_data.jobs_to_do, 'Style check')}} - uses: ./.github/workflows/reusable_test.yml - with: - test_name: Style check - runner_type: style-checker-aarch64 - run_command: | - python3 style_check.py - data: ${{ needs.RunConfig.outputs.data }} - secrets: - secret_envs: | - ROBOT_CLICKHOUSE_SSH_KEY< "$WORKFLOW_RESULT_FILE" << 'EOF' - ${{ toJson(needs) }} - EOF - python3 merge_pr.py --set-ci-status - - name: Check Workflow results - uses: ./.github/actions/check_workflow - with: - needs: ${{ toJson(needs) }} - - ################################# Stage Final ################################# - # - FinishCheck: - if: ${{ !failure() && !cancelled() }} - needs: [RunConfig, BuildDockers, StyleCheck, FastTest, Builds_1, Builds_2, Builds_Report, Tests_1, Tests_2_ww, Tests_2] - runs-on: [self-hosted, style-checker-aarch64] - steps: - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - filter: tree:0 - - name: Finish label - run: | - cd "$GITHUB_WORKSPACE/tests/ci" - python3 finish_check.py --wf-status ${{ contains(needs.*.result, 'failure') && 'failure' || 'success' }} - -############################################################################################# -###################################### JEPSEN TESTS ######################################### -############################################################################################# - # This is special test NOT INCLUDED in FinishCheck - # When it's skipped, all dependent tasks will be skipped too. 
- # DO NOT add it there - Jepsen: - # we need concurrency as the job uses dedicated instances in the cloud - concurrency: - group: jepsen - if: ${{ !failure() && !cancelled() && contains(fromJson(needs.RunConfig.outputs.data).jobs_data.jobs_to_do, 'ClickHouse Keeper Jepsen') }} - needs: [RunConfig, Builds_1] - uses: ./.github/workflows/reusable_test.yml - with: - test_name: ClickHouse Keeper Jepsen - runner_type: style-checker-aarch64 - data: ${{ needs.RunConfig.outputs.data }} diff --git a/ci/jobs/build_clickhouse.py b/ci/jobs/build_clickhouse.py index 21ed8091608..cfa358b4059 100644 --- a/ci/jobs/build_clickhouse.py +++ b/ci/jobs/build_clickhouse.py @@ -13,8 +13,14 @@ class JobStages(metaclass=MetaClasses.WithIter): def parse_args(): parser = argparse.ArgumentParser(description="ClickHouse Build Job") - parser.add_argument("BUILD_TYPE", help="Type: ") - parser.add_argument("--param", help="Optional custom job start stage", default=None) + parser.add_argument( + "BUILD_TYPE", help="Type: __" + ) + parser.add_argument( + "--param", + help="Optional user-defined job start stage (for local run)", + default=None, + ) return parser.parse_args() @@ -95,7 +101,7 @@ def main(): Shell.check(f"ls -l {build_dir}/programs/") res = results[-1].is_ok() - Result.create_from(results=results, stopwatch=stop_watch).finish_job_accordingly() + Result.create_from(results=results, stopwatch=stop_watch).complete_job() if __name__ == "__main__": diff --git a/ci/jobs/check_style.py b/ci/jobs/check_style.py index f9cdc76302d..d4b81abc92c 100644 --- a/ci/jobs/check_style.py +++ b/ci/jobs/check_style.py @@ -379,4 +379,4 @@ if __name__ == "__main__": ) ) - Result.create_from(results=results, stopwatch=stop_watch).finish_job_accordingly() + Result.create_from(results=results, stopwatch=stop_watch).complete_job() diff --git a/ci/jobs/fast_test.py b/ci/jobs/fast_test.py index 1dcd65b6ed2..dc5e1c975a6 100644 --- a/ci/jobs/fast_test.py +++ b/ci/jobs/fast_test.py @@ -330,7 +330,7 @@ def main(): CH.terminate() - Result.create_from(results=results, stopwatch=stop_watch).finish_job_accordingly() + Result.create_from(results=results, stopwatch=stop_watch).complete_job() if __name__ == "__main__": diff --git a/ci/jobs/functional_stateless_tests.py b/ci/jobs/functional_stateless_tests.py new file mode 100644 index 00000000000..dfdd5821a19 --- /dev/null +++ b/ci/jobs/functional_stateless_tests.py @@ -0,0 +1,48 @@ +import argparse + +from praktika.result import Result +from praktika.settings import Settings +from praktika.utils import MetaClasses, Shell, Utils + + +class JobStages(metaclass=MetaClasses.WithIter): + CHECKOUT_SUBMODULES = "checkout" + CMAKE = "cmake" + BUILD = "build" + + +def parse_args(): + parser = argparse.ArgumentParser(description="ClickHouse Build Job") + parser.add_argument("BUILD_TYPE", help="Type: ") + parser.add_argument("--param", help="Optional custom job start stage", default=None) + return parser.parse_args() + + +def main(): + + args = parse_args() + + stop_watch = Utils.Stopwatch() + + stages = list(JobStages) + stage = args.param or JobStages.CHECKOUT_SUBMODULES + if stage: + assert stage in JobStages, f"--param must be one of [{list(JobStages)}]" + print(f"Job will start from stage [{stage}]") + while stage in stages: + stages.pop(0) + stages.insert(0, stage) + + res = True + results = [] + + if res and JobStages.CHECKOUT_SUBMODULES in stages: + info = Shell.get_output(f"ls -l {Settings.INPUT_DIR}") + results.append(Result(name="TEST", status=Result.Status.SUCCESS, info=info)) + res = 
results[-1].is_ok() + + Result.create_from(results=results, stopwatch=stop_watch).complete_job() + + +if __name__ == "__main__": + main() diff --git a/ci/jobs/scripts/functional_tests_results.py b/ci/jobs/scripts/functional_tests_results.py index 5ac9d6b985d..aba3e4f7f5b 100755 --- a/ci/jobs/scripts/functional_tests_results.py +++ b/ci/jobs/scripts/functional_tests_results.py @@ -1,7 +1,6 @@ import dataclasses from typing import List -from praktika.environment import Environment from praktika.result import Result OK_SIGN = "[ OK " @@ -250,7 +249,7 @@ class FTResultsProcessor: # test_results.sort(key=test_result_comparator) return Result.create_from( - name=Environment.JOB_NAME, + name="Tests", results=test_results, status=state, files=[self.tests_output_file], diff --git a/ci/praktika/__main__.py b/ci/praktika/__main__.py index 7f472ecd9ae..fbb9f92909a 100644 --- a/ci/praktika/__main__.py +++ b/ci/praktika/__main__.py @@ -37,6 +37,24 @@ def create_parser(): type=str, default=None, ) + run_parser.add_argument( + "--pr", + help="PR number. Optional parameter for local run. Set if you want an required artifact to be uploaded from CI run in that PR", + type=int, + default=None, + ) + run_parser.add_argument( + "--sha", + help="Commit sha. Optional parameter for local run. Set if you want an required artifact to be uploaded from CI run on that sha, head sha will be used if not set", + type=str, + default=None, + ) + run_parser.add_argument( + "--branch", + help="Commit sha. Optional parameter for local run. Set if you want an required artifact to be uploaded from CI run on that branch, main branch name will be used if not set", + type=str, + default=None, + ) run_parser.add_argument( "--ci", help="When not set - dummy env will be generated, for local test", @@ -85,9 +103,12 @@ if __name__ == "__main__": workflow=workflow, job=job, docker=args.docker, - dummy_env=not args.ci, + local_run=not args.ci, no_docker=args.no_docker, param=args.param, + pr=args.pr, + branch=args.branch, + sha=args.sha, ) else: parser.print_help() diff --git a/ci/praktika/_environment.py b/ci/praktika/_environment.py index ce9c6f5b486..4ac8ad319f9 100644 --- a/ci/praktika/_environment.py +++ b/ci/praktika/_environment.py @@ -159,7 +159,8 @@ class _Environment(MetaClasses.Serializable): @classmethod def get_s3_prefix_static(cls, pr_number, branch, sha, latest=False): prefix = "" - if pr_number > 0: + assert sha or latest + if pr_number and pr_number > 0: prefix += f"{pr_number}" else: prefix += f"{branch}" diff --git a/ci/praktika/_settings.py b/ci/praktika/_settings.py index 3052d8ef877..1777257f484 100644 --- a/ci/praktika/_settings.py +++ b/ci/praktika/_settings.py @@ -1,5 +1,4 @@ import dataclasses -from pathlib import Path from typing import Dict, Iterable, List, Optional @@ -8,6 +7,7 @@ class _Settings: ###################################### # Pipeline generation settings # ###################################### + MAIN_BRANCH = "main" CI_PATH = "./ci" WORKFLOW_PATH_PREFIX: str = "./.github/workflows" WORKFLOWS_DIRECTORY: str = f"{CI_PATH}/workflows" @@ -111,6 +111,7 @@ _USER_DEFINED_SETTINGS = [ "CI_DB_INSERT_TIMEOUT_SEC", "SECRET_GH_APP_PEM_KEY", "SECRET_GH_APP_ID", + "MAIN_BRANCH", ] diff --git a/ci/praktika/hook_cache.py b/ci/praktika/hook_cache.py index b1b5c654f20..5cfedec0144 100644 --- a/ci/praktika/hook_cache.py +++ b/ci/praktika/hook_cache.py @@ -8,11 +8,9 @@ from praktika.utils import Utils class CacheRunnerHooks: @classmethod - def configure(cls, _workflow): - workflow_config = 
RunConfig.from_fs(_workflow.name) + def configure(cls, workflow): + workflow_config = RunConfig.from_fs(workflow.name) cache = Cache() - assert _Environment.get().WORKFLOW_NAME - workflow = _get_workflows(name=_Environment.get().WORKFLOW_NAME)[0] print(f"Workflow Configure, workflow [{workflow.name}]") assert ( workflow.enable_cache diff --git a/ci/praktika/json.html b/ci/praktika/json.html index 2f8c3e45d0b..af03ed702f8 100644 --- a/ci/praktika/json.html +++ b/ci/praktika/json.html @@ -89,6 +89,17 @@ letter-spacing: -0.5px; } + .dropdown-value { + width: 100px; + font-weight: normal; + font-family: inherit; + background-color: transparent; + color: inherit; + /*border: none;*/ + /*outline: none;*/ + /*cursor: pointer;*/ + } + #result-container { background-color: var(--tile-background); margin-left: calc(var(--status-width) + 20px); @@ -282,6 +293,12 @@ } } + function updateUrlParameter(paramName, paramValue) { + const url = new URL(window.location.href); + url.searchParams.set(paramName, paramValue); + window.location.href = url.toString(); + } + // Attach the toggle function to the click event of the icon document.getElementById('theme-toggle').addEventListener('click', toggleTheme); @@ -291,14 +308,14 @@ const monthNames = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]; const month = monthNames[date.getMonth()]; - const year = date.getFullYear(); + //const year = date.getFullYear(); const hours = String(date.getHours()).padStart(2, '0'); const minutes = String(date.getMinutes()).padStart(2, '0'); const seconds = String(date.getSeconds()).padStart(2, '0'); //const milliseconds = String(date.getMilliseconds()).padStart(2, '0'); return showDate - ? `${day}-${month}-${year} ${hours}:${minutes}:${seconds}` + ? `${day}'${month} ${hours}:${minutes}:${seconds}` : `${hours}:${minutes}:${seconds}`; } @@ -346,7 +363,7 @@ return 'status-other'; } - function addKeyValueToStatus(key, value) { + function addKeyValueToStatus(key, value, options = null) { const statusContainer = document.getElementById('status-container'); @@ -357,10 +374,25 @@ keyElement.className = 'json-key'; keyElement.textContent = key + ':'; - const valueElement = document.createElement('div'); - valueElement.className = 'json-value'; - valueElement.textContent = value; - + let valueElement + if (value) { + valueElement = document.createElement('div'); + valueElement.className = 'json-value'; + valueElement.textContent = value; + } else if (options) { + valueElement = document.createElement('select'); + valueElement.className = 'dropdown-value'; + valueElement.addEventListener('change', (event) => { + const selectedValue = event.target.value; + updateUrlParameter(key, selectedValue); + }); + options.forEach(optionValue => { + const option = document.createElement('option'); + option.value = optionValue; + option.textContent = optionValue; + valueElement.appendChild(option); + }); + } keyValuePair.appendChild(keyElement) keyValuePair.appendChild(valueElement) statusContainer.appendChild(keyValuePair); @@ -487,7 +519,7 @@ const columnSymbols = { name: '📂', - status: '✔ī¸', + status: '⏯ī¸', start_time: '🕒', duration: 'âŗ', info: 'ℹī¸', @@ -726,7 +758,7 @@ } else { console.error("TODO") } - addKeyValueToStatus("sha", sha); + addKeyValueToStatus("sha", null, [sha, 'lala']); if (nameParams[1]) { addKeyValueToStatus("job", nameParams[1]); } diff --git a/ci/praktika/result.py b/ci/praktika/result.py index 3d3c986d5f9..2ba8309ad60 100644 --- a/ci/praktika/result.py +++ b/ci/praktika/result.py @@ 
-318,7 +318,7 @@ class Result(MetaClasses.Serializable): files=[log_file] if log_file else None, ) - def finish_job_accordingly(self): + def complete_job(self): self.dump() if not self.is_ok(): print("ERROR: Job Failed") diff --git a/ci/praktika/runner.py b/ci/praktika/runner.py index 797a799a74d..823c7e0f36d 100644 --- a/ci/praktika/runner.py +++ b/ci/praktika/runner.py @@ -19,7 +19,7 @@ from praktika.utils import Shell, TeePopen, Utils class Runner: @staticmethod - def generate_dummy_environment(workflow, job): + def generate_local_run_environment(workflow, job, pr=None, branch=None, sha=None): print("WARNING: Generate dummy env for local test") Shell.check( f"mkdir -p {Settings.TEMP_DIR} {Settings.INPUT_DIR} {Settings.OUTPUT_DIR}" @@ -28,9 +28,9 @@ class Runner: WORKFLOW_NAME=workflow.name, JOB_NAME=job.name, REPOSITORY="", - BRANCH="", - SHA="", - PR_NUMBER=-1, + BRANCH=branch or Settings.MAIN_BRANCH if not pr else "", + SHA=sha or Shell.get_output("git rev-parse HEAD"), + PR_NUMBER=pr or -1, EVENT_TYPE="", JOB_OUTPUT_STREAM="", EVENT_FILE_PATH="", @@ -86,7 +86,7 @@ class Runner: return 0 - def _pre_run(self, workflow, job): + def _pre_run(self, workflow, job, local_run=False): env = _Environment.get() result = Result( @@ -96,9 +96,10 @@ class Runner: ) result.dump() - if workflow.enable_report and job.name != Settings.CI_CONFIG_JOB_NAME: - print("Update Job and Workflow Report") - HtmlRunnerHooks.pre_run(workflow, job) + if not local_run: + if workflow.enable_report and job.name != Settings.CI_CONFIG_JOB_NAME: + print("Update Job and Workflow Report") + HtmlRunnerHooks.pre_run(workflow, job) print("Download required artifacts") required_artifacts = [] @@ -133,11 +134,17 @@ class Runner: env.dump() if job.run_in_docker and not no_docker: - # TODO: add support for any image, including not from ci config (e.g. 
ubuntu:latest) - docker_tag = RunConfig.from_fs(workflow.name).digest_dockers[ - job.run_in_docker - ] - docker = docker or f"{job.run_in_docker}:{docker_tag}" + if ":" in job.run_in_docker: + docker_name, docker_tag = job.run_in_docker.split(":") + print( + f"WARNING: Job [{job.name}] use custom docker image with a tag - praktika won't control docker version" + ) + else: + docker_name, docker_tag = ( + job.run_in_docker, + RunConfig.from_fs(workflow.name).digest_dockers[job.run_in_docker], + ) + docker = docker or f"{docker_name}:{docker_tag}" cmd = f"docker run --rm --user \"$(id -u):$(id -g)\" -e PYTHONPATH='{Settings.DOCKER_WD}:{Settings.DOCKER_WD}/ci' --volume ./:{Settings.DOCKER_WD} --volume {Settings.TEMP_DIR}:{Settings.TEMP_DIR} --workdir={Settings.DOCKER_WD} {docker} {job.command}" else: cmd = job.command @@ -285,14 +292,23 @@ class Runner: return True def run( - self, workflow, job, docker="", dummy_env=False, no_docker=False, param=None + self, + workflow, + job, + docker="", + local_run=False, + no_docker=False, + param=None, + pr=None, + sha=None, + branch=None, ): res = True setup_env_code = -10 prerun_code = -10 run_code = -10 - if res and not dummy_env: + if res and not local_run: print( f"\n\n=== Setup env script [{job.name}], workflow [{workflow.name}] ===" ) @@ -309,13 +325,15 @@ class Runner: traceback.print_exc() print(f"=== Setup env finished ===\n\n") else: - self.generate_dummy_environment(workflow, job) + self.generate_local_run_environment( + workflow, job, pr=pr, branch=branch, sha=sha + ) - if res and not dummy_env: + if res: res = False print(f"=== Pre run script [{job.name}], workflow [{workflow.name}] ===") try: - prerun_code = self._pre_run(workflow, job) + prerun_code = self._pre_run(workflow, job, local_run=local_run) res = prerun_code == 0 if not res: print(f"ERROR: Pre-run failed with exit code [{prerun_code}]") @@ -339,7 +357,7 @@ class Runner: traceback.print_exc() print(f"=== Run scrip finished ===\n\n") - if not dummy_env: + if not local_run: print(f"=== Post run script [{job.name}], workflow [{workflow.name}] ===") self._post_run(workflow, job, setup_env_code, prerun_code, run_code) print(f"=== Post run scrip finished ===") diff --git a/ci/praktika/yaml_generator.py b/ci/praktika/yaml_generator.py index 00c469fec0c..fb918b4ddba 100644 --- a/ci/praktika/yaml_generator.py +++ b/ci/praktika/yaml_generator.py @@ -102,7 +102,11 @@ jobs: run: | . 
/tmp/praktika_setup_env.sh set -o pipefail - {PYTHON} -m praktika run --job '''{JOB_NAME}''' --workflow "{WORKFLOW_NAME}" --ci |& tee {RUN_LOG} + if command -v ts &> /dev/null; then + python3 -m praktika run --job '''{JOB_NAME}''' --workflow "{WORKFLOW_NAME}" --ci |& ts '[%Y-%m-%d %H:%M:%S]' | tee /tmp/praktika/praktika_run.log + else + python3 -m praktika run --job '''{JOB_NAME}''' --workflow "{WORKFLOW_NAME}" --ci |& tee /tmp/praktika/praktika_run.log + fi {UPLOADS_GITHUB}\ """ diff --git a/ci/settings/definitions.py b/ci/settings/definitions.py index 176e865e6f3..c67bdee015b 100644 --- a/ci/settings/definitions.py +++ b/ci/settings/definitions.py @@ -231,3 +231,4 @@ class JobNames: STYLE_CHECK = "Style Check" FAST_TEST = "Fast test" BUILD_AMD_DEBUG = "Build amd64 debug" + STATELESS_TESTS = "Stateless tests (amd, debug)" diff --git a/ci/settings/settings.py b/ci/settings/settings.py index 8d5e7bc3c87..0f3b1efcee0 100644 --- a/ci/settings/settings.py +++ b/ci/settings/settings.py @@ -4,6 +4,8 @@ from ci.settings.definitions import ( RunnerLabels, ) +MAIN_BRANCH = "master" + S3_ARTIFACT_PATH = f"{S3_BUCKET_NAME}/artifacts" CI_CONFIG_RUNS_ON = [RunnerLabels.CI_SERVICES] DOCKER_BUILD_RUNS_ON = [RunnerLabels.CI_SERVICES_EBS] diff --git a/ci/workflows/pull_request.py b/ci/workflows/pull_request.py index 74129177efb..c7715b40fca 100644 --- a/ci/workflows/pull_request.py +++ b/ci/workflows/pull_request.py @@ -59,6 +59,19 @@ job_build_amd_debug = Job.Config( provides=[ArtifactNames.ch_debug_binary], ) +stateless_tests_job = Job.Config( + name=JobNames.STATELESS_TESTS, + runs_on=[RunnerLabels.BUILDER], + command="python3 ./ci/jobs/functional_stateless_tests.py amd_debug", + run_in_docker="clickhouse/fasttest:latest", + digest_config=Job.CacheDigestConfig( + include_paths=[ + "./ci/jobs/functional_stateless_tests.py", + ], + ), + requires=[ArtifactNames.ch_debug_binary], +) + workflow = Workflow.Config( name="PR", event=Workflow.Event.PULL_REQUEST, @@ -67,6 +80,7 @@ workflow = Workflow.Config( style_check_job, fast_test_job, job_build_amd_debug, + stateless_tests_job, ], artifacts=[ Artifact.Config( @@ -91,4 +105,4 @@ if __name__ == "__main__": # local job test inside praktika environment from praktika.runner import Runner - Runner().run(workflow, fast_test_job, docker="fasttest", dummy_env=True) + Runner().run(workflow, fast_test_job, docker="fasttest", local_run=True)
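
Usage note (not part of the patch itself): the `--pr`, `--sha`, and `--branch` options added to `ci/praktika/__main__.py` above appear intended for local runs, pointing praktika at artifacts produced by an existing CI run instead of requiring them on the local filesystem. A minimal sketch of running the new "Stateless tests (amd, debug)" job locally under that assumption — the PR number below is a placeholder, and the invocation mirrors the run step emitted by `yaml_generator.py`:

```bash
# From the repository root, mirroring the generated workflow steps (sketch).
export PYTHONPATH=./ci:.

# Omitting --ci makes praktika generate a local-run environment
# (Runner.generate_local_run_environment); --pr tells it which PR's CI run
# to take the required ch_debug_binary artifact from (12345 is a placeholder).
python3 -m praktika run --job "Stateless tests (amd, debug)" --workflow "PR" --pr 12345
```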