mirror of https://github.com/ClickHouse/ClickHouse.git
synced 2024-11-24 16:42:05 +00:00

Address f-strings and too-long-lines

This commit is contained in:
  parent b9338750cb
  commit dfe27694b3
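The changes below are mechanical: f-string literals with no placeholders lose the f prefix, over-long command strings are split into parenthesized, implicitly concatenated pieces, imports are regrouped and alphabetized, and several print calls become lazy logging calls. A minimal sketch of the two string patterns (names and values here are illustrative, not taken from the diff):

    # Before: f-prefix without placeholders, and one over-long literal.
    print(f"Won't block the CI workflow")
    cmd = f"s3fs mybucket /mnt -o url=https://endpoint.example -o use_path_request_style -o umask=0000"

    # After: plain string; adjacent literals are joined by the parser at compile time.
    print("Won't block the CI workflow")
    cmd = (
        "s3fs mybucket /mnt "
        "-o url=https://endpoint.example "
        "-o use_path_request_style -o umask=0000"
    )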
@@ -1,15 +1,16 @@
 import argparse
 import time
 from pathlib import Path
-from typing import Optional
 from shutil import copy2
+from typing import Optional
 
+from ci_utils import Shell, WithIter
 from create_release import (
     PackageDownloader,
-    ReleaseInfo,
     ReleaseContextManager,
+    ReleaseInfo,
     ReleaseProgress,
 )
-from ci_utils import WithIter, Shell
 
+
 class MountPointApp(metaclass=WithIter):
@@ -54,21 +55,27 @@ class R2MountPoint:
                 "-o passwd_file /home/ubuntu/.passwd-s3fs_packages "
             )
             # without -o nomultipart there are errors like "Error 5 writing to /home/ubuntu/***.deb: Input/output error"
-            self.mount_cmd = f"s3fs {self.bucket_name} {self.MOUNT_POINT} -o url={self.API_ENDPOINT} -o use_path_request_style -o umask=0000 -o nomultipart -o logfile={self.LOG_FILE} {self.aux_mount_options}"
+            self.mount_cmd = (
+                f"s3fs {self.bucket_name} {self.MOUNT_POINT} -o url={self.API_ENDPOINT} "
+                f"-o use_path_request_style -o umask=0000 -o nomultipart "
+                f"-o logfile={self.LOG_FILE} {self.aux_mount_options}"
+            )
         elif self.app == MountPointApp.GEESEFS:
             self.cache_dir = "/home/ubuntu/geesefs_cache"
             self.aux_mount_options += (
                 f" --cache={self.cache_dir} " if self.CACHE_ENABLED else ""
             )
             if not dry_run:
-                self.aux_mount_options += f" --shared-config=/home/ubuntu/.r2_auth "
+                self.aux_mount_options += " --shared-config=/home/ubuntu/.r2_auth "
             else:
-                self.aux_mount_options += (
-                    f" --shared-config=/home/ubuntu/.r2_auth_test "
-                )
+                self.aux_mount_options += " --shared-config=/home/ubuntu/.r2_auth_test "
             if self.DEBUG:
                 self.aux_mount_options += " --debug_s3 "
-            self.mount_cmd = f"geesefs --endpoint={self.API_ENDPOINT} --cheap --memory-limit=1000 --gc-interval=100 --max-flushers=10 --max-parallel-parts=1 --max-parallel-copy=10 --log-file={self.LOG_FILE} {self.aux_mount_options} {self.bucket_name} {self.MOUNT_POINT}"
+            self.mount_cmd = (
+                f"geesefs --endpoint={self.API_ENDPOINT} --cheap --memory-limit=1000 "
+                f"--gc-interval=100 --max-flushers=10 --max-parallel-parts=1 --max-parallel-copy=10 "
+                f"--log-file={self.LOG_FILE} {self.aux_mount_options} {self.bucket_name} {self.MOUNT_POINT}"
+            )
         else:
             assert False
 
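A note on the wrapped commands above: Python's parser joins adjacent string literals at compile time, so each piece must carry its own separating space. A short sketch of the pitfall, with an illustrative command:

    cmd = (
        "s3fs mybucket /mnt "  # the trailing space is load-bearing...
        "-o nomultipart"       # ...without it the result is "/mnt-o nomultipart"
    )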
@@ -144,19 +151,32 @@ class DebianArtifactory:
         Shell.check(cmd, strict=True)
         Shell.check("sync")
         time.sleep(10)
-        Shell.check(f"lsof +D R2MountPoint.MOUNT_POINT", verbose=True)
+        Shell.check("lsof +D R2MountPoint.MOUNT_POINT", verbose=True)
 
     def test_packages(self):
         Shell.check("docker pull ubuntu:latest", strict=True)
         print(f"Test packages installation, version [{self.version}]")
-        debian_command = f"echo 'deb {self.repo_url} stable main' | tee /etc/apt/sources.list.d/clickhouse.list; apt update -y; apt-get install -y clickhouse-common-static={self.version} clickhouse-client={self.version}"
-        cmd = f'docker run --rm ubuntu:latest bash -c "apt update -y; apt install -y sudo gnupg ca-certificates; apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 8919F6BD2B48D754; {debian_command}"'
+        debian_command = (
+            f"echo 'deb {self.repo_url} stable main' | "
+            "tee /etc/apt/sources.list.d/clickhouse.list; apt update -y; "
+            f"apt-get install -y clickhouse-common-static={self.version} clickhouse-client={self.version}"
+        )
+        cmd = (
+            "docker run --rm ubuntu:latest bash -c "
+            f'"apt update -y; apt install -y sudo gnupg ca-certificates; apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 8919F6BD2B48D754; {debian_command}"'
+        )
         print("Running test command:")
         print(f"  {cmd}")
         assert Shell.check(cmd)
-        print(f"Test packages installation, version [latest]")
-        debian_command_2 = f"echo 'deb {self.repo_url} stable main' | tee /etc/apt/sources.list.d/clickhouse.list; apt update -y; apt-get install -y clickhouse-common-static clickhouse-client"
-        cmd = f'docker run --rm ubuntu:latest bash -c "apt update -y; apt install -y sudo gnupg ca-certificates; apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 8919F6BD2B48D754; {debian_command_2}"'
+        print("Test packages installation, version [latest]")
+        debian_command_2 = (
+            f"echo 'deb {self.repo_url} stable main' | "
+            "tee /etc/apt/sources.list.d/clickhouse.list; apt update -y; apt-get install -y clickhouse-common-static clickhouse-client"
+        )
+        cmd = (
+            "docker run --rm ubuntu:latest bash -c "
+            f'"apt update -y; apt install -y sudo gnupg ca-certificates; apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 8919F6BD2B48D754; {debian_command_2}"'
+        )
         print("Running test command:")
         print(f"  {cmd}")
         assert Shell.check(cmd)
@@ -236,7 +256,7 @@ class RpmArtifactory:
         print("Running test command:")
         print(f"  {cmd}")
         assert Shell.check(cmd)
-        print(f"Test package installation, version [latest]")
+        print("Test package installation, version [latest]")
         rpm_command_2 = f"dnf config-manager --add-repo={self.repo_url} && dnf makecache && dnf -y install clickhouse-client"
         cmd = f'docker run --rm fedora:latest /bin/bash -c "dnf -y install dnf-plugins-core && dnf config-manager --add-repo={self.repo_url} && {rpm_command_2}"'
         print("Running test command:")
@@ -359,7 +379,7 @@ if __name__ == "__main__":
     """
     S3FS - very slow with a big repo
     RCLONE - fuse had many different errors with r2 remote and completely removed
     GEESEFS ?
     """
     mp = R2MountPoint(MountPointApp.GEESEFS, dry_run=args.dry_run)
     if args.export_debian:
@@ -5,13 +5,13 @@ import os
 import sys
 from typing import List
 
-from get_robot_token import get_best_robot_token
-from github_helper import GitHub
-from ci_utils import Shell
-from env_helper import GITHUB_REPOSITORY
-from report import SUCCESS
 from ci_buddy import CIBuddy
 from ci_config import CI
+from ci_utils import Shell
+from env_helper import GITHUB_REPOSITORY
+from get_robot_token import get_best_robot_token
+from github_helper import GitHub
+from report import SUCCESS
 
 
 def parse_args():
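The import shuffles in this hunk and in the first one sort each group alphabetically, which is the layout a formatter such as isort produces. A hypothetical invocation (the repository's actual tooling, paths, and configuration are not shown in this diff):

    python -m isort tests/ci/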
@@ -229,7 +229,7 @@ def main():
         )
     else:
         CIBuddy(dry_run=False).post_info(
-            title=f"Autorelease completed",
+            title="Autorelease completed",
             body="",
             with_wf_link=True,
         )
@@ -467,17 +467,19 @@ def main():
     if branch and patch and Shell.check(f"git show-ref --quiet {branch}"):
         if patch > 1:
             query += f" base:{branch}"
-            print(
-                f"NOTE: It's a patch [{patch}]. will use base branch to filter PRs [{branch}]"
-            )
+            logging.info(
+                "NOTE: It's a patch [%s]. will use base branch to filter PRs [%s]",
+                patch,
+                branch,
+            )
         else:
-            print(
-                f"NOTE: It's a first patch version. should count PRs merged on master - won't filter PRs by branch"
-            )
+            logging.info(
+                "NOTE: It's a first patch version. should count PRs merged on master - won't filter PRs by branch"
+            )
     else:
-        print(f"ERROR: invalid branch {branch} - pass")
+        logging.error("ERROR: invalid branch %s - pass", branch)
 
-    print(f"Fetch PRs with query {query}")
+    logging.info("Fetch PRs with query %s", query)
     prs = gh.get_pulls_from_search(
         query=query, merged=merged, sort="created", progress_func=tqdm.tqdm
     )
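The print-to-logging switch above also moves from f-strings to %-style arguments: logging interpolates them lazily, only when the record is actually emitted, and pylint's logging-fstring-interpolation warning disappears. A minimal self-contained sketch with illustrative values:

    import logging

    logging.basicConfig(level=logging.INFO)
    patch, branch = 2, "24.8"
    # The message is formatted by the logging machinery, not eagerly at the call site:
    logging.info("It's a patch [%s]. will use base branch to filter PRs [%s]", patch, branch)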
@@ -995,7 +995,7 @@ def _run_test(job_name: str, run_command: str) -> int:
         jr = JobReport.load()
         if jr.dummy:
             print(
-                f"ERROR: Run action failed with timeout and did not generate JobReport - update dummy report with execution time"
+                "ERROR: Run action failed with timeout and did not generate JobReport - update dummy report with execution time"
             )
             jr.test_results = [TestResult.create_check_timeout_expired()]
             jr.duration = stopwatch.duration_seconds
@@ -1305,7 +1305,7 @@ def main() -> int:
         elif job_report.job_skipped:
             print(f"Skipped after rerun check {[args.job_name]} - do nothing")
         else:
-            print(f"ERROR: Job was killed - generate evidence")
+            print("ERROR: Job was killed - generate evidence")
             job_report.update_duration()
             ret_code = os.getenv("JOB_EXIT_CODE", "")
             if ret_code:
@@ -352,11 +352,20 @@ class ReleaseInfo:
             with checkout_new(branch_upd_version_contributors):
                 update_cmake_version(version)
                 update_contributors(raise_error=True)
-                cmd_commit_version_upd = f"{GIT_PREFIX} commit '{CMAKE_PATH}' '{CONTRIBUTORS_PATH}' -m 'Update autogenerated version to {self.version} and contributors'"
+                cmd_commit_version_upd = (
+                    f"{GIT_PREFIX} commit '{CMAKE_PATH}' '{CONTRIBUTORS_PATH}' "
+                    f"-m 'Update autogenerated version to {self.version} and contributors'"
+                )
                 cmd_push_branch = f"{GIT_PREFIX} push --set-upstream origin {branch_upd_version_contributors}"
                 actor = os.getenv("GITHUB_ACTOR", "") or "me"
-                body = f"Automatic version bump after release {self.release_tag}\n### Changelog category (leave one):\n- Not for changelog (changelog entry is not required)\n"
-                cmd_create_pr = f"gh pr create --repo {CI.Envs.GITHUB_REPOSITORY} --title 'Update version after release' --head {branch_upd_version_contributors} --base master --body \"{body}\" --assignee {actor}"
+                body = (
+                    f"Automatic version bump after release {self.release_tag}\n"
+                    "### Changelog category (leave one):\n- Not for changelog (changelog entry is not required)\n"
+                )
+                cmd_create_pr = (
+                    f"gh pr create --repo {CI.Envs.GITHUB_REPOSITORY} --title 'Update version after release' "
+                    f'--head {branch_upd_version_contributors} --base master --body "{body}" --assignee {actor}'
+                )
                 Shell.check(
                     cmd_commit_version_upd,
                     strict=True,
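The body string above contains newlines and '#' characters and is spliced into a double-quoted shell argument, which is fragile. A hedged alternative (not what this commit does) is to skip the shell and pass an argument vector, so no quoting is needed; the gh flags used are the documented ones, the repo name is illustrative:

    import subprocess

    subprocess.run(
        [
            "gh", "pr", "create",
            "--repo", "org/repo",  # illustrative repository
            "--title", "Update version after release",
            "--head", "branch-name", "--base", "master",
            "--body", body, "--assignee", actor,
        ],
        check=True,  # raise if gh exits non-zero
    )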
@@ -447,7 +456,7 @@ class ReleaseInfo:
         else:
             print("Dry-run, would run commands:")
             print("\n * ".join(cmds))
-            self.release_url = f"dry-run"
+            self.release_url = "dry-run"
             self.dump()
 
     def merge_prs(self, dry_run: bool) -> None:
@@ -247,7 +247,7 @@ def handle_sigterm(signum, _frame):
     print(f"WARNING: Received signal {signum}")
     global timeout_expired  # pylint:disable=global-statement
     timeout_expired = True
-    Shell.check(f"docker exec func-tester pkill -f clickhouse-test", verbose=True)
+    Shell.check("docker exec func-tester pkill -f clickhouse-test", verbose=True)
 
 
 def main():
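For context, a handler with this (signum, frame) signature is registered through the standard signal module; a minimal sketch of the registration, which this hunk does not show:

    import signal

    signal.signal(signal.SIGTERM, handle_sigterm)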
@@ -401,7 +401,7 @@ def main():
             failed_cnt
             and failed_cnt <= CI.MAX_TOTAL_FAILURES_PER_JOB_BEFORE_BLOCKING_CI
         ):
-            print(f"Won't block the CI workflow")
+            print("Won't block the CI workflow")
             should_block_ci = False
 
     if should_block_ci:
@@ -9,7 +9,10 @@ import sys
 from pathlib import Path
 from typing import Dict, List, Tuple
 
+import integration_tests_runner as runner
 from build_download_helper import download_all_deb_packages
+from ci_config import CI
+from ci_utils import Utils
 from docker_images_helper import DockerImage, get_docker_image
 from download_release_packages import download_last_release
 from env_helper import REPO_COPY, REPORT_PATH, TEMP_PATH
@@ -17,20 +20,16 @@ from integration_test_images import IMAGES
 from pr_info import PRInfo
 from report import (
     ERROR,
+    FAILURE,
     SUCCESS,
-    StatusType,
     JobReport,
+    StatusType,
     TestResult,
     TestResults,
     read_test_results,
-    FAILURE,
 )
 from stopwatch import Stopwatch
 
-import integration_tests_runner as runner
-from ci_config import CI
-from ci_utils import Utils
-
 
 def get_json_params_dict(
     check_name: str,
@@ -249,7 +248,7 @@ def main():
             failed_cnt
             and failed_cnt <= CI.MAX_TOTAL_FAILURES_PER_JOB_BEFORE_BLOCKING_CI
         ):
-            print(f"Won't block the CI workflow")
+            print("Won't block the CI workflow")
             should_block_ci = False
 
     if should_block_ci: