Merge pull request #66339 from ClickHouse/create_release_wf

Create release workflow
Max K 2024-07-13 18:55:47 +00:00 committed by GitHub
commit 000cd64d60
21 changed files with 1315 additions and 61 deletions


@ -7,3 +7,4 @@ self-hosted-runner:
- stress-tester
- style-checker
- style-checker-aarch64
- release-maker


@ -6,8 +6,8 @@ concurrency:
'on':
workflow_dispatch:
inputs:
sha:
description: 'The SHA hash of the commit from which to create the release'
ref:
description: 'Git reference (branch or commit sha) from which to create the release'
required: true
type: string
type:
@ -15,15 +15,152 @@ concurrency:
required: true
type: choice
options:
- new
- patch
- new
dry-run:
description: 'Dry run'
required: false
default: true
type: boolean
jobs:
Release:
runs-on: [self-hosted, style-checker-aarch64]
CreateRelease:
env:
GH_TOKEN: ${{ secrets.ROBOT_CLICKHOUSE_COMMIT_TOKEN }}
runs-on: [self-hosted, release-maker]
steps:
- name: DebugInfo
uses: hmarr/debug-action@f7318c783045ac39ed9bb497e22ce835fdafbfe6
- name: Set envs
# https://docs.github.com/en/actions/learn-github-actions/workflow-commands-for-github-actions#multiline-strings
run: |
cat >> "$GITHUB_ENV" << 'EOF'
ROBOT_CLICKHOUSE_SSH_KEY<<RCSK
${{secrets.ROBOT_CLICKHOUSE_SSH_KEY}}
RCSK
RELEASE_INFO_FILE=${{ runner.temp }}/release_info.json
EOF
- name: Check out repository code
uses: ClickHouse/checkout@v1
with:
token: ${{secrets.ROBOT_CLICKHOUSE_COMMIT_TOKEN}}
fetch-depth: 0
- name: Prepare Release Info
run: |
python3 ./tests/ci/release.py --commit ${{ inputs.sha }} --type ${{ inputs.type }} --dry-run
python3 ./tests/ci/create_release.py --prepare-release-info \
--ref ${{ inputs.ref }} --release-type ${{ inputs.type }} \
--outfile ${{ env.RELEASE_INFO_FILE }} ${{ inputs.dry-run && '--dry-run' || '' }}
echo "::group::Release Info"
python3 -m json.tool "$RELEASE_INFO_FILE"
echo "::endgroup::"
release_tag=$(jq -r '.release_tag' "$RELEASE_INFO_FILE")
commit_sha=$(jq -r '.commit_sha' "$RELEASE_INFO_FILE")
echo "Release Tag: $release_tag"
echo "RELEASE_TAG=$release_tag" >> "$GITHUB_ENV"
echo "COMMIT_SHA=$commit_sha" >> "$GITHUB_ENV"
- name: Download All Release Artifacts
if: ${{ inputs.type == 'patch' }}
run: |
python3 ./tests/ci/create_release.py --infile "$RELEASE_INFO_FILE" --download-packages ${{ inputs.dry-run && '--dry-run' || '' }}
- name: Push Git Tag for the Release
run: |
python3 ./tests/ci/create_release.py --push-release-tag --infile "$RELEASE_INFO_FILE" ${{ inputs.dry-run && '--dry-run' || '' }}
- name: Push New Release Branch
if: ${{ inputs.type == 'new' }}
run: |
python3 ./tests/ci/create_release.py --push-new-release-branch --infile "$RELEASE_INFO_FILE" ${{ inputs.dry-run && '--dry-run' || '' }}
- name: Bump CH Version and Update Contributors' List
run: |
python3 ./tests/ci/create_release.py --create-bump-version-pr --infile "$RELEASE_INFO_FILE" ${{ inputs.dry-run && '--dry-run' || '' }}
- name: Checkout master
run: |
git checkout master
- name: Bump Docker versions, Changelog, Security
if: ${{ inputs.type == 'patch' }}
run: |
[ "$(git branch --show-current)" != "master" ] && echo "not on the master" && exit 1
echo "List versions"
./utils/list-versions/list-versions.sh > ./utils/list-versions/version_date.tsv
echo "Update docker version"
./utils/list-versions/update-docker-version.sh
echo "Generate ChangeLog"
export CI=1
docker run -u "${UID}:${GID}" -e PYTHONUNBUFFERED=1 -e CI=1 --network=host \
--volume=".:/ClickHouse" clickhouse/style-test \
/ClickHouse/tests/ci/changelog.py -v --debug-helpers \
--gh-user-or-token="$GH_TOKEN" --jobs=5 \
--output="/ClickHouse/docs/changelogs/${{ env.RELEASE_TAG }}.md" ${{ env.RELEASE_TAG }}
git add ./docs/changelogs/${{ env.RELEASE_TAG }}.md
echo "Generate Security"
python3 ./utils/security-generator/generate_security.py > SECURITY.md
git diff HEAD
- name: Generate ChangeLog
if: ${{ inputs.type == 'patch' && ! inputs.dry-run }}
uses: peter-evans/create-pull-request@v6
with:
author: "robot-clickhouse <robot-clickhouse@users.noreply.github.com>"
token: ${{ secrets.ROBOT_CLICKHOUSE_COMMIT_TOKEN }}
committer: "robot-clickhouse <robot-clickhouse@users.noreply.github.com>"
commit-message: Update version_date.tsv and changelogs after ${{ env.RELEASE_TAG }}
branch: auto/${{ env.RELEASE_TAG }}
assignees: ${{ github.event.sender.login }} # assign the PR to the tag pusher
delete-branch: true
title: Update version_date.tsv and changelog after ${{ env.RELEASE_TAG }}
labels: do not test
body: |
Update version_date.tsv and changelogs after ${{ env.RELEASE_TAG }}
### Changelog category (leave one):
- Not for changelog (changelog entry is not required)
- name: Reset changes if Dry-run
if: ${{ inputs.dry-run }}
run: |
git reset --hard HEAD
- name: Checkout back to GITHUB_REF
run: |
git checkout "$GITHUB_REF_NAME"
- name: Create GH Release
if: ${{ inputs.type == 'patch' }}
run: |
python3 ./tests/ci/create_release.py --create-gh-release \
--infile ${{ env.RELEASE_INFO_FILE }} ${{ inputs.dry-run && '--dry-run' || '' }}
- name: Export TGZ Packages
if: ${{ inputs.type == 'patch' }}
run: |
python3 ./tests/ci/artifactory.py --export-tgz --infile ${{ env.RELEASE_INFO_FILE }} ${{ inputs.dry-run && '--dry-run' || '' }}
- name: Test TGZ Packages
if: ${{ inputs.type == 'patch' }}
run: |
python3 ./tests/ci/artifactory.py --test-tgz --infile ${{ env.RELEASE_INFO_FILE }} ${{ inputs.dry-run && '--dry-run' || '' }}
- name: Export RPM Packages
if: ${{ inputs.type == 'patch' }}
run: |
python3 ./tests/ci/artifactory.py --export-rpm --infile ${{ env.RELEASE_INFO_FILE }} ${{ inputs.dry-run && '--dry-run' || '' }}
- name: Test RPM Packages
if: ${{ inputs.type == 'patch' }}
run: |
python3 ./tests/ci/artifactory.py --test-rpm --infile ${{ env.RELEASE_INFO_FILE }} ${{ inputs.dry-run && '--dry-run' || '' }}
- name: Export Debian Packages
if: ${{ inputs.type == 'patch' }}
run: |
python3 ./tests/ci/artifactory.py --export-debian --infile ${{ env.RELEASE_INFO_FILE }} ${{ inputs.dry-run && '--dry-run' || '' }}
- name: Test Debian Packages
if: ${{ inputs.type == 'patch' }}
run: |
python3 ./tests/ci/artifactory.py --test-debian --infile ${{ env.RELEASE_INFO_FILE }} ${{ inputs.dry-run && '--dry-run' || '' }}
- name: Docker clickhouse/clickhouse-server building
if: ${{ inputs.type == 'patch' }}
run: |
cd "./tests/ci"
export CHECK_NAME="Docker server image"
python3 docker_server.py --release-type auto --version ${{ env.RELEASE_TAG }} --check-name "$CHECK_NAME" --sha ${{ env.COMMIT_SHA }} ${{ ! inputs.dry-run && '--push' || '' }}
- name: Docker clickhouse/clickhouse-keeper building
if: ${{ inputs.type == 'patch' }}
run: |
cd "./tests/ci"
export CHECK_NAME="Docker keeper image"
python3 docker_server.py --release-type auto --version ${{ env.RELEASE_TAG }} --check-name "$CHECK_NAME" --sha ${{ env.COMMIT_SHA }} ${{ ! inputs.dry-run && '--push' || '' }}
- name: Post Slack Message
if: always()
run: |
echo Slack Message
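The "Prepare Release Info" step above writes release metadata to a JSON file and reads individual fields back with jq. A minimal Python equivalent of that read-back (a sketch; the path is illustrative, the field names come from the ReleaseInfo dataclass in create_release.py below):

import json

# Load the file written by `create_release.py --prepare-release-info --outfile ...`
with open("/tmp/release_info.json", encoding="utf-8") as f:
    info = json.load(f)

# The workflow exports exactly these two fields into GITHUB_ENV for later steps.
print(info["release_tag"])
print(info["commit_sha"])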


@ -27,19 +27,19 @@ def run_fuzzer(fuzzer: str):
parser.read(path)
if parser.has_section("asan"):
os.environ[
"ASAN_OPTIONS"
] = f"{os.environ['ASAN_OPTIONS']}:{':'.join('%s=%s' % (key, value) for key, value in parser['asan'].items())}"
os.environ["ASAN_OPTIONS"] = (
f"{os.environ['ASAN_OPTIONS']}:{':'.join('%s=%s' % (key, value) for key, value in parser['asan'].items())}"
)
if parser.has_section("msan"):
os.environ[
"MSAN_OPTIONS"
] = f"{os.environ['MSAN_OPTIONS']}:{':'.join('%s=%s' % (key, value) for key, value in parser['msan'].items())}"
os.environ["MSAN_OPTIONS"] = (
f"{os.environ['MSAN_OPTIONS']}:{':'.join('%s=%s' % (key, value) for key, value in parser['msan'].items())}"
)
if parser.has_section("ubsan"):
os.environ[
"UBSAN_OPTIONS"
] = f"{os.environ['UBSAN_OPTIONS']}:{':'.join('%s=%s' % (key, value) for key, value in parser['ubsan'].items())}"
os.environ["UBSAN_OPTIONS"] = (
f"{os.environ['UBSAN_OPTIONS']}:{':'.join('%s=%s' % (key, value) for key, value in parser['ubsan'].items())}"
)
if parser.has_section("libfuzzer"):
custom_libfuzzer_options = " ".join(


@ -3,7 +3,7 @@ aiosignal==1.3.1
astroid==3.1.0
async-timeout==4.0.3
attrs==23.2.0
black==23.12.0
black==24.4.2
boto3==1.34.131
botocore==1.34.131
certifi==2024.6.2


@ -17,6 +17,8 @@ src_paths = ["src", "tests/ci", "tests/sqllogic"]
[tool.pylint.'MESSAGES CONTROL']
# pytest.mark.parametrize is not callable (not-callable)
disable = '''
pointless-string-statement,
line-too-long,
missing-docstring,
too-few-public-methods,
invalid-name,

355 tests/ci/artifactory.py Normal file

@ -0,0 +1,355 @@
import argparse
import time
from pathlib import Path
from typing import Optional
from shutil import copy2
from create_release import PackageDownloader, ReleaseInfo, ShellRunner
from ci_utils import WithIter
class MountPointApp(metaclass=WithIter):
RCLONE = "rclone"
S3FS = "s3fs"
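# NOTE (illustrative sketch, not part of this PR): WithIter from ci_utils is
# assumed to be a metaclass that makes a class iterable over its public
# attribute values; that is what lets membership checks like
# `app in MountPointApp` below work. A minimal version:
class _WithIterSketch(type):
    def __iter__(cls):
        # yield public class attribute values in definition order
        return iter(v for k, v in vars(cls).items() if not k.startswith("_"))
class _AppsExample(metaclass=_WithIterSketch):
    RCLONE = "rclone"
    S3FS = "s3fs"
assert "s3fs" in _AppsExample  # `in` falls back to the metaclass __iter__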
class R2MountPoint:
_TEST_BUCKET_NAME = "repo-test"
_PROD_BUCKET_NAME = "packages"
_CACHE_MAX_SIZE_GB = 20
MOUNT_POINT = "/home/ubuntu/mountpoint"
API_ENDPOINT = "https://d4fd593eebab2e3a58a599400c4cd64d.r2.cloudflarestorage.com"
LOG_FILE = "/home/ubuntu/fuse_mount.log"
# mod time is not required by reprepro and createrepo - disable to simplify bucket's mount sync (applicable for rclone)
NOMODTIME = True
# enable debug messages in mount log
DEBUG = True
# enable cache for mountpoint
CACHE_ENABLED = False
# TODO: which mode is better: minimal/writes/full/off
_RCLONE_CACHE_MODE = "minimal"
UMASK = "0000"
def __init__(self, app: str, dry_run: bool) -> None:
assert app in MountPointApp
self.app = app
if dry_run:
self.bucket_name = self._TEST_BUCKET_NAME
else:
self.bucket_name = self._PROD_BUCKET_NAME
self.aux_mount_options = ""
self.async_mount = False
if self.app == MountPointApp.S3FS:
self.cache_dir = "/home/ubuntu/s3fs_cache"
# self.aux_mount_options += "-o nomodtime " if self.NOMODTIME else "" not for s3fs
self.aux_mount_options += "--debug " if self.DEBUG else ""
self.aux_mount_options += (
f"-o use_cache={self.cache_dir} -o cache_size_mb={self._CACHE_MAX_SIZE_GB * 1024} "
if self.CACHE_ENABLED
else ""
)
# without -o nomultipart there are errors like "Error 5 writing to /home/ubuntu/***.deb: Input/output error"
self.mount_cmd = f"s3fs {self.bucket_name} {self.MOUNT_POINT} -o url={self.API_ENDPOINT} -o use_path_request_style -o umask=0000 -o nomultipart -o logfile={self.LOG_FILE} {self.aux_mount_options}"
elif self.app == MountPointApp.RCLONE:
# run rclone mount process asynchronously, otherwise subprocess.run(daemonized command) will not return
self.async_mount = True
self.cache_dir = "/home/ubuntu/rclone_cache"
self.aux_mount_options += "--no-modtime " if self.NOMODTIME else ""
self.aux_mount_options += "-v " if self.DEBUG else "" # -vv too verbose
self.aux_mount_options += (
f"--vfs-cache-mode {self._RCLONE_CACHE_MODE} --vfs-cache-max-size {self._CACHE_MAX_SIZE_GB}G"
if self.CACHE_ENABLED
else "--vfs-cache-mode off"
)
# Use --no-modtime to try to avoid: ERROR : rpm/lts/clickhouse-client-24.3.6.5.x86_64.rpm: Failed to apply pending mod time
self.mount_cmd = f"rclone mount remote:{self.bucket_name} {self.MOUNT_POINT} --daemon --cache-dir {self.cache_dir} --umask 0000 --log-file {self.LOG_FILE} {self.aux_mount_options}"
else:
assert False
def init(self):
print(f"Mount bucket [{self.bucket_name}] to [{self.MOUNT_POINT}]")
_CLEAN_LOG_FILE_CMD = f"tail -n 1000 {self.LOG_FILE} > {self.LOG_FILE}_tmp && mv {self.LOG_FILE}_tmp {self.LOG_FILE} ||:"
_MKDIR_CMD = f"mkdir -p {self.MOUNT_POINT}"
_MKDIR_FOR_CACHE = f"mkdir -p {self.cache_dir}"
_UNMOUNT_CMD = (
f"mount | grep -q {self.MOUNT_POINT} && umount {self.MOUNT_POINT} ||:"
)
_TEST_MOUNT_CMD = f"mount | grep -q {self.MOUNT_POINT}"
ShellRunner.run(_CLEAN_LOG_FILE_CMD)
ShellRunner.run(_UNMOUNT_CMD)
ShellRunner.run(_MKDIR_CMD)
ShellRunner.run(_MKDIR_FOR_CACHE)
ShellRunner.run(self.mount_cmd, async_=self.async_mount)
if self.async_mount:
time.sleep(3)
ShellRunner.run(_TEST_MOUNT_CMD)
@classmethod
def teardown(cls):
print(f"Unmount [{cls.MOUNT_POINT}]")
ShellRunner.run(f"umount {cls.MOUNT_POINT}")
class RepoCodenames(metaclass=WithIter):
LTS = "lts"
STABLE = "stable"
class DebianArtifactory:
_TEST_REPO_URL = "https://pub-73dd1910f4284a81a02a67018967e028.r2.dev/deb"
_PROD_REPO_URL = "https://packages.clickhouse.com/deb"
def __init__(self, release_info: ReleaseInfo, dry_run: bool):
self.codename = release_info.codename
self.version = release_info.version
if dry_run:
self.repo_url = self._TEST_REPO_URL
else:
self.repo_url = self._PROD_REPO_URL
assert self.codename in RepoCodenames
self.pd = PackageDownloader(
release=release_info.release_branch,
commit_sha=release_info.commit_sha,
version=release_info.version,
)
def export_packages(self):
assert self.pd.local_deb_packages_ready(), "BUG: Packages are not downloaded"
print("Start adding packages")
paths = [
self.pd.LOCAL_DIR + "/" + file for file in self.pd.get_deb_packages_files()
]
REPREPRO_CMD_PREFIX = f"reprepro --basedir {R2MountPoint.MOUNT_POINT}/configs/deb --outdir {R2MountPoint.MOUNT_POINT}/deb --verbose"
cmd = f"{REPREPRO_CMD_PREFIX} includedeb {self.codename} {' '.join(paths)}"
print("Running export command:")
print(f" {cmd}")
ShellRunner.run(cmd)
ShellRunner.run("sync")
if self.codename == RepoCodenames.LTS:
packages_with_version = [
package + "=" + self.version for package in self.pd.get_packages_names()
]
print(
f"Copy packages from {RepoCodenames.LTS} to {RepoCodenames.STABLE} repository"
)
cmd = f"{REPREPRO_CMD_PREFIX} copy {RepoCodenames.STABLE} {RepoCodenames.LTS} {' '.join(packages_with_version)}"
print("Running copy command:")
print(f" {cmd}")
ShellRunner.run(cmd)
ShellRunner.run("sync")
def test_packages(self):
ShellRunner.run("docker pull ubuntu:latest")
print(f"Test packages installation, version [{self.version}]")
cmd = f"docker run --rm ubuntu:latest bash -c \"apt update -y; apt install -y sudo gnupg ca-certificates; apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 8919F6BD2B48D754; echo 'deb {self.repo_url} stable main' | tee /etc/apt/sources.list.d/clickhouse.list; apt update -y; apt-get install -y clickhouse-client={self.version}\""
print("Running test command:")
print(f" {cmd}")
ShellRunner.run(cmd)
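# Helper for the exporters below: copy a package into the mounted bucket only
# if it is missing there or differs in size, so a re-run of an export stays
# idempotent.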
def _copy_if_not_exists(src: Path, dst: Path) -> Path:
if dst.is_dir():
dst = dst / src.name
if not dst.exists():
return copy2(src, dst) # type: ignore
if src.stat().st_size == dst.stat().st_size:
return dst
return copy2(src, dst) # type: ignore
class RpmArtifactory:
_TEST_REPO_URL = (
"https://pub-73dd1910f4284a81a02a67018967e028.r2.dev/rpm/clickhouse.repo"
)
_PROD_REPO_URL = "https://packages.clickhouse.com/rpm/clickhouse.repo"
_SIGN_KEY = "885E2BDCF96B0B45ABF058453E4AD4719DDE9A38"
def __init__(self, release_info: ReleaseInfo, dry_run: bool):
self.codename = release_info.codename
self.version = release_info.version
if dry_run:
self.repo_url = self._TEST_REPO_URL
else:
self.repo_url = self._PROD_REPO_URL
assert self.codename in RepoCodenames
self.pd = PackageDownloader(
release=release_info.release_branch,
commit_sha=release_info.commit_sha,
version=release_info.version,
)
def export_packages(self, codename: Optional[str] = None) -> None:
assert self.pd.local_rpm_packages_ready(), "BUG: Packages are not downloaded"
codename = codename or self.codename
print(f"Start adding packages to [{codename}]")
paths = [
self.pd.LOCAL_DIR + "/" + file for file in self.pd.get_rpm_packages_files()
]
dest_dir = Path(R2MountPoint.MOUNT_POINT) / "rpm" / codename
for package in paths:
_copy_if_not_exists(Path(package), dest_dir)
commands = (
f"createrepo_c --local-sqlite --workers=2 --update --verbose {dest_dir}",
f"gpg --sign-with {self._SIGN_KEY} --detach-sign --batch --yes --armor {dest_dir / 'repodata' / 'repomd.xml'}",
)
print(f"Exporting RPM packages into [{codename}]")
for command in commands:
print("Running command:")
print(f" {command}")
ShellRunner.run(command)
update_public_key = f"gpg --armor --export {self._SIGN_KEY}"
pub_key_path = dest_dir / "repodata" / "repomd.xml.key"
print("Updating repomd.xml.key")
pub_key_path.write_text(ShellRunner.run(update_public_key)[1])
if codename == RepoCodenames.LTS:
self.export_packages(RepoCodenames.STABLE)
ShellRunner.run("sync")
def test_packages(self):
ShellRunner.run("docker pull fedora:latest")
print(f"Test package installation, version [{self.version}]")
cmd = f'docker run --rm fedora:latest /bin/bash -c "dnf -y install dnf-plugins-core && dnf config-manager --add-repo={self.repo_url} && dnf makecache && dnf -y install clickhouse-client-{self.version}-1"'
print("Running test command:")
print(f" {cmd}")
ShellRunner.run(cmd)
class TgzArtifactory:
_TEST_REPO_URL = "https://pub-73dd1910f4284a81a02a67018967e028.r2.dev/tgz"
_PROD_REPO_URL = "https://packages.clickhouse.com/tgz"
def __init__(self, release_info: ReleaseInfo, dry_run: bool):
self.codename = release_info.codename
self.version = release_info.version
if dry_run:
self.repo_url = self._TEST_REPO_URL
else:
self.repo_url = self._PROD_REPO_URL
assert self.codename in RepoCodenames
self.pd = PackageDownloader(
release=release_info.release_branch,
commit_sha=release_info.commit_sha,
version=release_info.version,
)
def export_packages(self, codename: Optional[str] = None) -> None:
assert self.pd.local_tgz_packages_ready(), "BUG: Packages are not downloaded"
codename = codename or self.codename
paths = [
self.pd.LOCAL_DIR + "/" + file for file in self.pd.get_tgz_packages_files()
]
dest_dir = Path(R2MountPoint.MOUNT_POINT) / "tgz" / codename
print(f"Exporting TGZ packages into [{codename}]")
for package in paths:
_copy_if_not_exists(Path(package), dest_dir)
if codename == RepoCodenames.LTS:
self.export_packages(RepoCodenames.STABLE)
ShellRunner.run("sync")
def test_packages(self):
tgz_file = "/tmp/tmp.tgz"
tgz_sha_file = "/tmp/tmp.tgz.sha512"
ShellRunner.run(
f"curl -o {tgz_file} -f0 {self.repo_url}/stable/clickhouse-client-{self.version}-arm64.tgz"
)
ShellRunner.run(
f"curl -o {tgz_sha_file} -f0 {self.repo_url}/stable/clickhouse-client-{self.version}-arm64.tgz.sha512"
)
expected_checksum = ShellRunner.run(f"cut -d ' ' -f 1 {tgz_sha_file}")
actual_checksum = ShellRunner.run(f"sha512sum {tgz_file} | cut -d ' ' -f 1")
assert (
expected_checksum == actual_checksum
), f"[{actual_checksum} != {expected_checksum}]"
ShellRunner.run("rm /tmp/tmp.tgz*")
def parse_args() -> argparse.Namespace:
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
description="Adds release packages to the repository",
)
parser.add_argument(
"--infile",
type=str,
required=True,
help="input file with release info",
)
parser.add_argument(
"--export-debian",
action="store_true",
help="Export debian packages to repository",
)
parser.add_argument(
"--export-rpm",
action="store_true",
help="Export rpm packages to repository",
)
parser.add_argument(
"--export-tgz",
action="store_true",
help="Export tgz packages to repository",
)
parser.add_argument(
"--test-debian",
action="store_true",
help="Test debian packages installation",
)
parser.add_argument(
"--test-rpm",
action="store_true",
help="Test rpm packages installation",
)
parser.add_argument(
"--test-tgz",
action="store_true",
help="Test tgz packages installation",
)
parser.add_argument(
"--dry-run",
action="store_true",
help="Dry run mode",
)
return parser.parse_args()
if __name__ == "__main__":
args = parse_args()
assert args.dry_run
release_info = ReleaseInfo.from_file(args.infile)
"""
Use S3FS. RCLONE has some errors with r2 remote which I didn't figure out how to resolve:
ERROR : IO error: NotImplemented: versionId not implemented
Failed to copy: NotImplemented: versionId not implemented
"""
mp = R2MountPoint(MountPointApp.S3FS, dry_run=args.dry_run)
if args.export_debian:
mp.init()
DebianArtifactory(release_info, dry_run=args.dry_run).export_packages()
mp.teardown()
if args.export_rpm:
mp.init()
RpmArtifactory(release_info, dry_run=args.dry_run).export_packages()
mp.teardown()
if args.export_tgz:
mp.init()
TgzArtifactory(release_info, dry_run=args.dry_run).export_packages()
mp.teardown()
if args.test_debian:
DebianArtifactory(release_info, dry_run=args.dry_run).test_packages()
if args.test_tgz:
TgzArtifactory(release_info, dry_run=args.dry_run).test_packages()
if args.test_rpm:
RpmArtifactory(release_info, dry_run=args.dry_run).test_packages()

710 tests/ci/create_release.py Executable file

@ -0,0 +1,710 @@
import argparse
import dataclasses
import json
import os
import subprocess
from contextlib import contextmanager
from copy import copy
from pathlib import Path
from typing import Iterator, List
from git_helper import Git, GIT_PREFIX
from ssh import SSHAgent
from env_helper import GITHUB_REPOSITORY, S3_BUILDS_BUCKET
from s3_helper import S3Helper
from autoscale_runners_lambda.lambda_shared.pr import Labels
from version_helper import (
FILE_WITH_VERSION_PATH,
GENERATED_CONTRIBUTORS,
get_abs_path,
get_version_from_repo,
update_cmake_version,
update_contributors,
VersionType,
)
from ci_config import CI
CMAKE_PATH = get_abs_path(FILE_WITH_VERSION_PATH)
CONTRIBUTORS_PATH = get_abs_path(GENERATED_CONTRIBUTORS)
class ShellRunner:
@classmethod
def run(
cls, command, check_retcode=True, print_output=True, async_=False, dry_run=False
):
if dry_run:
print(f"Dry-run: Would run shell command: [{command}]")
return 0, ""
print(f"Running shell command: [{command}]")
if async_:
subprocess.Popen(command.split(" ")) # pylint:disable=consider-using-with
return 0, ""
result = subprocess.run(
command + " 2>&1",
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
text=True,
check=True,
)
if print_output:
print(result.stdout)
if check_retcode:
assert result.returncode == 0, f"Return code [{result.returncode}]"
return result.returncode, result.stdout
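# Usage sketch: run() returns a (returncode, stdout) tuple, so callers
# destructure it, e.g. `_, sha = ShellRunner.run("git rev-parse HEAD")`.
# Note that with check=True subprocess.run already raises CalledProcessError
# on a non-zero exit status, before the check_retcode assert is reached.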
@dataclasses.dataclass
class ReleaseInfo:
version: str
release_tag: str
release_branch: str
commit_sha: str
# lts or stable
codename: str
@staticmethod
def from_file(file_path: str) -> "ReleaseInfo":
with open(file_path, "r", encoding="utf-8") as json_file:
res = json.load(json_file)
return ReleaseInfo(**res)
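# The JSON consumed by from_file mirrors the dataclass fields; an
# illustrative example (values are hypothetical):
#   {
#     "version": "24.8.1.1",
#     "release_tag": "v24.8.1.1-new",
#     "release_branch": "master",
#     "commit_sha": "1a2b3c4d5e6f",
#     "codename": "stable"
#   }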
@staticmethod
def prepare(commit_ref: str, release_type: str, outfile: str) -> None:
Path(outfile).parent.mkdir(parents=True, exist_ok=True)
Path(outfile).unlink(missing_ok=True)
version = None
release_branch = None
release_tag = None
codename = None
assert release_type in ("patch", "new")
if release_type == "new":
# check that commit_ref is valid and on the right branch
ShellRunner.run(
f"git merge-base --is-ancestor origin/{commit_ref} origin/master"
)
with checkout(commit_ref):
_, commit_sha = ShellRunner.run(f"git rev-parse {commit_ref}")
# Git() must be inside "with checkout" contextmanager
git = Git()
version = get_version_from_repo(git=git)
release_branch = "master"
expected_prev_tag = f"v{version.major}.{version.minor}.1.1-new"
version.bump().with_description(VersionType.NEW)
assert (
git.latest_tag == expected_prev_tag
), f"BUG: latest tag [{git.latest_tag}], expected [{expected_prev_tag}]"
release_tag = version.describe
codename = (
VersionType.STABLE
) # dummy value (artifactory won't be updated for new release)
if release_type == "patch":
with checkout(commit_ref):
_, commit_sha = ShellRunner.run(f"git rev-parse {commit_ref}")
# Git() must be inside "with checkout" contextmanager
git = Git()
version = get_version_from_repo(git=git)
codename = version.get_stable_release_type()
version.with_description(codename)
release_branch = f"{version.major}.{version.minor}"
release_tag = version.describe
ShellRunner.run(f"{GIT_PREFIX} fetch origin {release_branch} --tags")
# check that the commit is valid and on the right branch
ShellRunner.run(
f"git merge-base --is-ancestor {commit_ref} origin/{release_branch}"
)
if version.patch == 1:
expected_version = copy(version)
expected_version.bump()
expected_tag_prefix = (
f"v{expected_version.major}.{expected_version.minor}-"
)
expected_tag_suffix = "-new"
else:
expected_tag_prefix = (
f"v{version.major}.{version.minor}.{version.patch-1}."
)
expected_tag_suffix = f"-{version.get_stable_release_type()}"
if git.latest_tag.startswith(
expected_tag_prefix
) and git.latest_tag.endswith(expected_tag_suffix):
pass
else:
assert (
False
), f"BUG: Unexpected latest tag [{git.latest_tag}] expected [{expected_tag_prefix}*{expected_tag_suffix}]"
assert (
release_branch
and commit_sha
and release_tag
and version
and codename in ("lts", "stable")
)
res = ReleaseInfo(
release_branch=release_branch,
commit_sha=commit_sha,
release_tag=release_tag,
version=version.string,
codename=codename,
)
with open(outfile, "w", encoding="utf-8") as f:
print(json.dumps(dataclasses.asdict(res), indent=2), file=f)
def push_release_tag(self, dry_run: bool) -> None:
if dry_run:
# remove locally created tag from prev run
ShellRunner.run(
f"{GIT_PREFIX} tag -l | grep -q {self.release_tag} && git tag -d {self.release_tag} ||:"
)
# Create release tag
print(
f"Create and push release tag [{self.release_tag}], commit [{self.commit_sha}]"
)
tag_message = f"Release {self.release_tag}"
ShellRunner.run(
f"{GIT_PREFIX} tag -a -m '{tag_message}' {self.release_tag} {self.commit_sha}"
)
cmd_push_tag = f"{GIT_PREFIX} push origin {self.release_tag}:{self.release_tag}"
ShellRunner.run(cmd_push_tag, dry_run=dry_run)
@staticmethod
def _create_gh_label(label: str, color_hex: str, dry_run: bool) -> None:
cmd = f"gh api repos/{GITHUB_REPOSITORY}/labels -f name={label} -f color={color_hex}"
ShellRunner.run(cmd, dry_run=dry_run)
def push_new_release_branch(self, dry_run: bool) -> None:
assert (
self.release_branch == "master"
), "New release branch can be created only for release type [new]"
git = Git()
version = get_version_from_repo(git=git)
new_release_branch = f"{version.major}.{version.minor}"
stable_release_type = version.get_stable_release_type()
version_after_release = copy(version)
version_after_release.bump()
assert (
version_after_release.string == self.version
), f"Unexpected current version in git, must precede [{self.version}] by one step, actual [{version.string}]"
if dry_run:
# remove locally created branch from prev run
ShellRunner.run(
f"{GIT_PREFIX} branch -l | grep -q {new_release_branch} && git branch -d {new_release_branch} ||:"
)
print(
f"Create and push new release branch [{new_release_branch}], commit [{self.commit_sha}]"
)
with checkout(self.release_branch):
with checkout_new(new_release_branch):
pr_labels = f"--label {Labels.RELEASE}"
if stable_release_type == VersionType.LTS:
pr_labels += f" --label {Labels.RELEASE_LTS}"
cmd_push_branch = (
f"{GIT_PREFIX} push --set-upstream origin {new_release_branch}"
)
ShellRunner.run(cmd_push_branch, dry_run=dry_run)
print("Create and push backport tags for new release branch")
ReleaseInfo._create_gh_label(
f"v{new_release_branch}-must-backport", "10dbed", dry_run=dry_run
)
ReleaseInfo._create_gh_label(
f"v{new_release_branch}-affected", "c2bfff", dry_run=dry_run
)
ShellRunner.run(
f"""gh pr create --repo {GITHUB_REPOSITORY} --title 'Release pull request for branch {new_release_branch}'
--head {new_release_branch} {pr_labels}
--body 'This PullRequest is a part of ClickHouse release cycle. It is used by CI system only. Do not perform any changes with it.'
""",
dry_run=dry_run,
)
def update_version_and_contributors_list(self, dry_run: bool) -> None:
# Bump version, update contributors list, create PR
branch_upd_version_contributors = f"bump_version_{self.version}"
with checkout(self.commit_sha):
git = Git()
version = get_version_from_repo(git=git)
if self.release_branch == "master":
version.bump()
version.with_description(VersionType.TESTING)
else:
version.with_description(version.get_stable_release_type())
assert (
version.string == self.version
), f"BUG: version in release info does not match version in git commit, expected [{self.version}], got [{version.string}]"
with checkout(self.release_branch):
with checkout_new(branch_upd_version_contributors):
update_cmake_version(version)
update_contributors(raise_error=True)
cmd_commit_version_upd = f"{GIT_PREFIX} commit '{CMAKE_PATH}' '{CONTRIBUTORS_PATH}' -m 'Update autogenerated version to {self.version} and contributors'"
cmd_push_branch = f"{GIT_PREFIX} push --set-upstream origin {branch_upd_version_contributors}"
body_file = get_abs_path(".github/PULL_REQUEST_TEMPLATE.md")
actor = os.getenv("GITHUB_ACTOR", "") or "me"
cmd_create_pr = f"gh pr create --repo {GITHUB_REPOSITORY} --title 'Update version after release' --head {branch_upd_version_contributors} --base {self.release_branch} --body-file '{body_file}' --label 'do not test' --assignee @{actor}"
ShellRunner.run(cmd_commit_version_upd, dry_run=dry_run)
ShellRunner.run(cmd_push_branch, dry_run=dry_run)
ShellRunner.run(cmd_create_pr, dry_run=dry_run)
if dry_run:
ShellRunner.run(
f"{GIT_PREFIX} diff '{CMAKE_PATH}' '{CONTRIBUTORS_PATH}'"
)
ShellRunner.run(
f"{GIT_PREFIX} checkout '{CMAKE_PATH}' '{CONTRIBUTORS_PATH}'"
)
def create_gh_release(self, packages_files: List[str], dry_run: bool) -> None:
repo = os.getenv("GITHUB_REPOSITORY")
assert repo
cmds = []
cmds.append(
f"gh release create --repo {repo} --title 'Release {self.release_tag}' {self.release_tag}"
)
for file in packages_files:
cmds.append(f"gh release upload {self.release_tag} {file}")
if not dry_run:
for cmd in cmds:
ShellRunner.run(cmd)
else:
print("Dry-run, would run commands:")
print("\n * ".join(cmds))
class RepoTypes:
RPM = "rpm"
DEBIAN = "deb"
TGZ = "tgz"
class PackageDownloader:
PACKAGES = (
"clickhouse-client",
"clickhouse-common-static",
"clickhouse-common-static-dbg",
"clickhouse-keeper",
"clickhouse-keeper-dbg",
"clickhouse-server",
)
EXTRA_PACKAGES = (
"clickhouse-library-bridge",
"clickhouse-odbc-bridge",
)
PACKAGE_TYPES = (CI.BuildNames.PACKAGE_RELEASE, CI.BuildNames.PACKAGE_AARCH64)
MACOS_PACKAGE_TO_BIN_SUFFIX = {
CI.BuildNames.BINARY_DARWIN: "macos",
CI.BuildNames.BINARY_DARWIN_AARCH64: "macos-aarch64",
}
LOCAL_DIR = "/tmp/packages"
@classmethod
def _get_arch_suffix(cls, package_arch, repo_type):
if package_arch == CI.BuildNames.PACKAGE_RELEASE:
return (
"amd64" if repo_type in (RepoTypes.DEBIAN, RepoTypes.TGZ) else "x86_64"
)
elif package_arch == CI.BuildNames.PACKAGE_AARCH64:
return (
"arm64" if repo_type in (RepoTypes.DEBIAN, RepoTypes.TGZ) else "aarch64"
)
else:
assert False, "BUG"
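# Examples: PACKAGE_RELEASE maps to "amd64" for deb/tgz but "x86_64" for rpm;
# PACKAGE_AARCH64 maps to "arm64" for deb/tgz but "aarch64" for rpm.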
def __init__(self, release, commit_sha, version):
assert version.startswith(release), "Invalid release branch or version"
major, minor = map(int, release.split("."))
self.package_names = list(self.PACKAGES)
if major > 24 or (major == 24 and minor > 3):
self.package_names += list(self.EXTRA_PACKAGES)
self.release = release
self.commit_sha = commit_sha
self.version = version
self.s3 = S3Helper()
self.deb_package_files = []
self.rpm_package_files = []
self.tgz_package_files = []
# just binaries for macos
self.macos_package_files = ["clickhouse-macos", "clickhouse-macos-aarch64"]
self.file_to_type = {}
ShellRunner.run(f"mkdir -p {self.LOCAL_DIR}")
for package_type in self.PACKAGE_TYPES:
for package in self.package_names:
deb_package_file_name = f"{package}_{self.version}_{self._get_arch_suffix(package_type, RepoTypes.DEBIAN)}.deb"
self.deb_package_files.append(deb_package_file_name)
self.file_to_type[deb_package_file_name] = package_type
rpm_package_file_name = f"{package}-{self.version}.{self._get_arch_suffix(package_type, RepoTypes.RPM)}.rpm"
self.rpm_package_files.append(rpm_package_file_name)
self.file_to_type[rpm_package_file_name] = package_type
tgz_package_file_name = f"{package}-{self.version}-{self._get_arch_suffix(package_type, RepoTypes.TGZ)}.tgz"
self.tgz_package_files.append(tgz_package_file_name)
self.file_to_type[tgz_package_file_name] = package_type
tgz_package_file_name += ".sha512"
self.tgz_package_files.append(tgz_package_file_name)
self.file_to_type[tgz_package_file_name] = package_type
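# The resulting artifact names look like (version is illustrative):
#   clickhouse-client_24.3.4.5_amd64.deb
#   clickhouse-client-24.3.4.5.x86_64.rpm
#   clickhouse-client-24.3.4.5-amd64.tgz (plus a .tgz.sha512 checksum file)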
def get_deb_packages_files(self):
return self.deb_package_files
def get_rpm_packages_files(self):
return self.rpm_package_files
def get_tgz_packages_files(self):
return self.tgz_package_files
def get_macos_packages_files(self):
return self.macos_package_files
def get_packages_names(self):
return self.package_names
def get_all_packages_files(self):
assert self.local_tgz_packages_ready()
assert self.local_deb_packages_ready()
assert self.local_rpm_packages_ready()
assert self.local_macos_packages_ready()
res = []
for package_file in (
self.deb_package_files
+ self.rpm_package_files
+ self.tgz_package_files
+ self.macos_package_files
):
res.append(self.LOCAL_DIR + "/" + package_file)
return res
def run(self):
ShellRunner.run(f"rm -rf {self.LOCAL_DIR}/*")
for package_file in (
self.deb_package_files + self.rpm_package_files + self.tgz_package_files
):
print(f"Downloading: [{package_file}]")
s3_path = "/".join(
[
self.release,
self.commit_sha,
self.file_to_type[package_file],
package_file,
]
)
self.s3.download_file(
bucket=S3_BUILDS_BUCKET,
s3_path=s3_path,
local_file_path="/".join([self.LOCAL_DIR, package_file]),
)
for macos_package, bin_suffix in self.MACOS_PACKAGE_TO_BIN_SUFFIX.items():
binary_name = "clickhouse"
destination_binary_name = f"{binary_name}-{bin_suffix}"
assert destination_binary_name in self.macos_package_files
print(
f"Downloading: [{macos_package}] binary to [{destination_binary_name}]"
)
s3_path = "/".join(
[
self.release,
self.commit_sha,
macos_package,
binary_name,
]
)
self.s3.download_file(
bucket=S3_BUILDS_BUCKET,
s3_path=s3_path,
local_file_path="/".join([self.LOCAL_DIR, destination_binary_name]),
)
def local_deb_packages_ready(self) -> bool:
assert self.deb_package_files
for package_file in self.deb_package_files:
print(f"Check package is downloaded [{package_file}]")
if not Path(self.LOCAL_DIR + "/" + package_file).is_file():
return False
return True
def local_rpm_packages_ready(self) -> bool:
assert self.rpm_package_files
for package_file in self.rpm_package_files:
print(f"Check package is downloaded [{package_file}]")
if not Path(self.LOCAL_DIR + "/" + package_file).is_file():
return False
return True
def local_tgz_packages_ready(self) -> bool:
assert self.tgz_package_files
for package_file in self.tgz_package_files:
print(f"Check package is downloaded [{package_file}]")
if not Path(self.LOCAL_DIR + "/" + package_file).is_file():
return False
return True
def local_macos_packages_ready(self) -> bool:
assert self.macos_package_files
for package_file in self.macos_package_files:
print(f"Check package is downloaded [{package_file}]")
if not Path(self.LOCAL_DIR + "/" + package_file).is_file():
return False
return True
def parse_args() -> argparse.Namespace:
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
description="Creates release",
)
parser.add_argument(
"--prepare-release-info",
action="store_true",
help="Initial step to prepare info like release branch, release tag, etc.",
)
parser.add_argument(
"--push-release-tag",
action="store_true",
help="Creates and pushes git tag",
)
parser.add_argument(
"--push-new-release-branch",
action="store_true",
help="Creates and pushes new release branch and corresponding service gh tags for backports",
)
parser.add_argument(
"--create-bump-version-pr",
action="store_true",
help="Updates version, contributors' list and creates PR",
)
parser.add_argument(
"--download-packages",
action="store_true",
help="Downloads all required packages from s3",
)
parser.add_argument(
"--create-gh-release",
action="store_true",
help="Create GH Release object and attach all packages",
)
parser.add_argument(
"--ref",
type=str,
help="the commit hash or branch",
)
parser.add_argument(
"--release-type",
choices=("new", "patch"),
# dest="release_type",
help="a release type to bump the major.minor.patch version part, "
"new branch is created only for the value 'new'",
)
parser.add_argument(
"--dry-run",
action="store_true",
help="do not make any actual changes in the repo, just show what will be done",
)
parser.add_argument(
"--outfile",
default="",
type=str,
help="output file to write json result to, if not set - stdout",
)
parser.add_argument(
"--infile",
default="",
type=str,
help="input file with release info",
)
return parser.parse_args()
@contextmanager
def checkout(ref: str) -> Iterator[None]:
_, orig_ref = ShellRunner.run(f"{GIT_PREFIX} symbolic-ref --short HEAD")
rollback_cmd = f"{GIT_PREFIX} checkout {orig_ref}"
assert orig_ref
if ref not in (orig_ref,):
ShellRunner.run(f"{GIT_PREFIX} checkout {ref}")
try:
yield
except (Exception, KeyboardInterrupt) as e:
print(f"ERROR: Exception [{e}]")
ShellRunner.run(rollback_cmd)
raise
ShellRunner.run(rollback_cmd)
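# Usage sketch: run commands on another ref and always return to the original
# branch afterwards, even if the body raises:
#   with checkout("24.3"):
#       ShellRunner.run("git log -1 --oneline")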
@contextmanager
def checkout_new(ref: str) -> Iterator[None]:
_, orig_ref = ShellRunner.run(f"{GIT_PREFIX} symbolic-ref --short HEAD")
rollback_cmd = f"{GIT_PREFIX} checkout {orig_ref}"
assert orig_ref
ShellRunner.run(f"{GIT_PREFIX} checkout -b {ref}")
try:
yield
except (Exception, KeyboardInterrupt) as e:
print(f"ERROR: Exception [{e}]")
ShellRunner.run(rollback_cmd)
raise
ShellRunner.run(rollback_cmd)
if __name__ == "__main__":
args = parse_args()
assert args.dry_run
# prepare ssh for git if needed
_ssh_agent = None
_key_pub = None
if os.getenv("ROBOT_CLICKHOUSE_SSH_KEY", ""):
_key = os.getenv("ROBOT_CLICKHOUSE_SSH_KEY")
_ssh_agent = SSHAgent()
_key_pub = _ssh_agent.add(_key)
_ssh_agent.print_keys()
if args.prepare_release_info:
assert (
args.ref and args.release_type and args.outfile
), "--ref, --release-type and --outfile must be provided with --prepare-release-info"
ReleaseInfo.prepare(
commit_ref=args.ref, release_type=args.release_type, outfile=args.outfile
)
if args.push_release_tag:
assert args.infile, "--infile <release info file path> must be provided"
release_info = ReleaseInfo.from_file(args.infile)
release_info.push_release_tag(dry_run=args.dry_run)
if args.push_new_release_branch:
assert args.infile, "--infile <release info file path> must be provided"
release_info = ReleaseInfo.from_file(args.infile)
release_info.push_new_release_branch(dry_run=args.dry_run)
if args.create_bump_version_pr:
# TODO: store link to PR in release info
assert args.infile, "--infile <release info file path> must be provided"
release_info = ReleaseInfo.from_file(args.infile)
release_info.update_version_and_contributors_list(dry_run=args.dry_run)
if args.download_packages:
assert args.infile, "--infile <release info file path> must be provided"
release_info = ReleaseInfo.from_file(args.infile)
p = PackageDownloader(
release=release_info.release_branch,
commit_sha=release_info.commit_sha,
version=release_info.version,
)
p.run()
if args.create_gh_release:
assert args.infile, "--infile <release info file path> must be provided"
release_info = ReleaseInfo.from_file(args.infile)
p = PackageDownloader(
release=release_info.release_branch,
commit_sha=release_info.commit_sha,
version=release_info.version,
)
release_info.create_gh_release(p.get_all_packages_files(), args.dry_run)
# tear down ssh
if _ssh_agent and _key_pub:
_ssh_agent.remove(_key_pub)
"""
Prepare release machine:
### INSTALL PACKAGES
sudo apt update
sudo apt install --yes --no-install-recommends python3-dev python3-pip gh unzip
sudo apt install --yes python3-boto3
sudo apt install --yes python3-github
sudo apt install --yes python3-unidiff
sudo apt install --yes s3fs
### INSTALL AWS CLI
cd /tmp
curl "https://awscli.amazonaws.com/awscli-exe-linux-$(uname -m).zip" -o "awscliv2.zip"
unzip awscliv2.zip
sudo ./aws/install
rm -rf aws*
cd -
### INSTALL GH ACTIONS RUNNER:
# Create a folder
RUNNER_VERSION=2.317.0
cd ~
mkdir actions-runner && cd actions-runner
# Download the latest runner package
runner_arch() {
case $(uname -m) in
x86_64 )
echo x64;;
aarch64 )
echo arm64;;
esac
}
curl -O -L https://github.com/actions/runner/releases/download/v$RUNNER_VERSION/actions-runner-linux-$(runner_arch)-$RUNNER_VERSION.tar.gz
# Extract the installer
tar xzf ./actions-runner-linux-$(runner_arch)-$RUNNER_VERSION.tar.gz
rm ./actions-runner-linux-$(runner_arch)-$RUNNER_VERSION.tar.gz
### Install reprepro:
cd ~
sudo apt install dpkg-dev libgpgme-dev libdb-dev libbz2-dev liblzma-dev libarchive-dev shunit2 db-util debhelper
git clone https://salsa.debian.org/debian/reprepro.git
cd reprepro
dpkg-buildpackage -b --no-sign && sudo dpkg -i ../reprepro_$(dpkg-parsechangelog --show-field Version)_$(dpkg-architecture -q DEB_HOST_ARCH).deb
### Install createrepo-c:
sudo apt install createrepo-c
createrepo_c --version
#Version: 0.17.3 (Features: DeltaRPM LegacyWeakdeps )
### Import gpg sign key
gpg --import key.pgp
gpg --list-secret-keys
### Install docker
sudo su; cd ~
deb_arch() {
case $(uname -m) in
x86_64 )
echo amd64;;
aarch64 )
echo arm64;;
esac
}
curl -fsSL https://download.docker.com/linux/ubuntu/gpg | gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg
echo "deb [arch=$(deb_arch) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
sudo apt-get update
sudo apt-get install --yes --no-install-recommends docker-ce docker-buildx-plugin docker-ce-cli containerd.io
sudo usermod -aG docker ubuntu
# enable ipv6 in containers (fixed-cidr-v6 is some random network mask)
cat <<EOT > /etc/docker/daemon.json
{
"ipv6": true,
"fixed-cidr-v6": "2001:db8:1::/64",
"log-driver": "json-file",
"log-opts": {
"max-file": "5",
"max-size": "1000m"
},
"insecure-registries" : ["dockerhub-proxy.dockerhub-proxy-zone:5000"],
"registry-mirrors" : ["http://dockerhub-proxy.dockerhub-proxy-zone:5000"]
}
EOT
# if docker build does not work:
sudo systemctl restart docker
docker buildx rm mybuilder
docker buildx create --name mybuilder --driver docker-container --use
docker buildx inspect mybuilder --bootstrap
### Install tailscale
### Configure GH runner
"""


@ -11,7 +11,6 @@ from os import path as p
from pathlib import Path
from typing import Dict, List
from build_check import get_release_or_pr
from build_download_helper import read_build_urls
from docker_images_helper import DockerImageData, docker_login
from env_helper import (
@ -22,7 +21,7 @@ from env_helper import (
TEMP_PATH,
)
from git_helper import Git
from pr_info import PRInfo
from pr_info import PRInfo, EventType
from report import FAILURE, SUCCESS, JobReport, TestResult, TestResults
from stopwatch import Stopwatch
from tee_popen import TeePopen
@ -63,6 +62,12 @@ def parse_args() -> argparse.Namespace:
help="a version to build, automaticaly got from version_helper, accepts either "
"tag ('refs/tags/' is removed automatically) or a normal 22.2.2.2 format",
)
parser.add_argument(
"--sha",
type=str,
default="",
help="sha of the commit to use packages from",
)
parser.add_argument(
"--release-type",
type=str,
@ -122,7 +127,7 @@ def parse_args() -> argparse.Namespace:
def retry_popen(cmd: str, log_file: Path) -> int:
max_retries = 5
max_retries = 2
for retry in range(max_retries):
# From time to time docker build may failed. Curl issues, or even push
# It will sleep progressively 5, 15, 30 and 50 seconds between retries
@ -370,13 +375,22 @@ def main():
tags = gen_tags(args.version, args.release_type)
repo_urls = {}
direct_urls: Dict[str, List[str]] = {}
release_or_pr, _ = get_release_or_pr(pr_info, args.version)
if pr_info.event_type == EventType.PULL_REQUEST:
release_or_pr = str(pr_info.number)
sha = pr_info.sha
elif pr_info.event_type == EventType.PUSH and pr_info.is_master:
release_or_pr = str(0)
sha = pr_info.sha
else:
release_or_pr = f"{args.version.major}.{args.version.minor}"
sha = args.sha
assert sha
for arch, build_name in zip(ARCH, ("package_release", "package_aarch64")):
if not args.bucket_prefix:
repo_urls[arch] = (
f"{S3_DOWNLOAD}/{S3_BUILDS_BUCKET}/"
f"{release_or_pr}/{pr_info.sha}/{build_name}"
f"{release_or_pr}/{sha}/{build_name}"
)
else:
repo_urls[arch] = f"{args.bucket_prefix}/{build_name}"


@ -37,9 +37,9 @@ class SSHAgent:
ssh_options = (
"," + os.environ["SSH_OPTIONS"] if os.environ.get("SSH_OPTIONS") else ""
)
os.environ[
"SSH_OPTIONS"
] = f"{ssh_options}UserKnownHostsFile=/dev/null,StrictHostKeyChecking=no"
os.environ["SSH_OPTIONS"] = (
f"{ssh_options}UserKnownHostsFile=/dev/null,StrictHostKeyChecking=no"
)
def add(self, key):
key_pub = self._key_pub(key)


@ -16,7 +16,15 @@ from docker_images_helper import get_docker_image, pull_image
from env_helper import IS_CI, REPO_COPY, TEMP_PATH, GITHUB_EVENT_PATH
from git_helper import GIT_PREFIX, git_runner
from pr_info import PRInfo
from report import ERROR, FAILURE, SUCCESS, JobReport, TestResults, read_test_results
from report import (
ERROR,
FAILURE,
SUCCESS,
JobReport,
TestResults,
read_test_results,
FAIL,
)
from ssh import SSHKey
from stopwatch import Stopwatch
@ -192,15 +200,6 @@ def main():
future = executor.submit(subprocess.run, cmd_shell, shell=True)
_ = future.result()
autofix_description = ""
if args.push:
try:
commit_push_staged(pr_info)
except subprocess.SubprocessError:
# do not fail the whole script if the autofix didn't work out
logging.error("Unable to push the autofix. Continue.")
autofix_description = "Failed to push autofix to the PR. "
subprocess.check_call(
f"python3 ../../utils/check-style/process_style_check_result.py --in-results-dir {temp_path} "
f"--out-results-file {temp_path}/test_results.tsv --out-status-file {temp_path}/check_status.tsv || "
@ -210,6 +209,21 @@ def main():
state, description, test_results, additional_files = process_result(temp_path)
autofix_description = ""
fail_cnt = 0
for result in test_results:
if result.status in (FAILURE, FAIL):
# autofix only when black is the sole failed check
fail_cnt += 1
if args.push and fail_cnt == 1:
try:
commit_push_staged(pr_info)
except subprocess.SubprocessError:
# do not fail the whole script if the autofix didn't work out
logging.error("Unable to push the autofix. Continue.")
autofix_description = "Failed to push autofix to the PR. "
JobReport(
description=f"{autofix_description}{description}",
test_results=test_results,

View File

@ -172,14 +172,10 @@ class TestCIOptions(unittest.TestCase):
job: CI.JobConfig(runner_type=CI.Runners.STYLE_CHECKER)
for job in _TEST_JOB_LIST
}
jobs_configs[
"fuzzers"
].run_by_label = (
jobs_configs["fuzzers"].run_by_label = (
"TEST_LABEL" # check "fuzzers" appears in the result due to the label
)
jobs_configs[
"Integration tests (asan)"
].release_only = (
jobs_configs["Integration tests (asan)"].release_only = (
True # still must be included as it's set with include keywords
)
filtered_jobs = list(
@ -311,9 +307,9 @@ class TestCIOptions(unittest.TestCase):
job: CI.JobConfig(runner_type=CI.Runners.STYLE_CHECKER)
for job in _TEST_JOB_LIST
}
jobs_configs[
"fuzzers"
].run_by_label = "TEST_LABEL" # check "fuzzers" does not appear in the result
jobs_configs["fuzzers"].run_by_label = (
"TEST_LABEL" # check "fuzzers" does not appears in the result
)
jobs_configs["Integration tests (asan)"].release_only = True
filtered_jobs = list(
ci_options.apply(


@ -72,6 +72,19 @@ class ClickHouseVersion:
return self.patch_update()
raise KeyError(f"wrong part {part} is used")
def bump(self) -> "ClickHouseVersion":
if self.minor < 12:
self._minor += 1
self._revision += 1
self._patch = 1
self._tweak = 1
else:
self._major += 1
self._revision += 1
self._patch = 1
self._tweak = 1
return self
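# Illustrative bump examples (version numbers are hypothetical): 24.7 becomes
# 24.8 while the minor part is below 12; once it reaches 12, the major part is
# bumped instead. Both branches increment the revision and reset patch/tweak
# to 1.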
def major_update(self) -> "ClickHouseVersion":
if self._git is not None:
self._git.update()
@ -148,6 +161,11 @@ class ClickHouseVersion:
"""our X.3 and X.8 are LTS"""
return self.minor % 5 == 3
def get_stable_release_type(self) -> str:
if self.is_lts:
return VersionType.LTS
return VersionType.STABLE
def as_dict(self) -> VERSIONS:
return {
"revision": self.revision,
@ -168,6 +186,7 @@ class ClickHouseVersion:
raise ValueError(f"version type {version_type} not in {VersionType.VALID}")
self._description = version_type
self._describe = f"v{self.string}-{version_type}"
return self
def copy(self) -> "ClickHouseVersion":
copy = ClickHouseVersion(


@ -711,9 +711,9 @@ def get_localzone():
class SettingsRandomizer:
settings = {
"max_insert_threads": lambda: 12
if random.random() < 0.03
else random.randint(1, 3),
"max_insert_threads": lambda: (
12 if random.random() < 0.03 else random.randint(1, 3)
),
"group_by_two_level_threshold": threshold_generator(0.2, 0.2, 1, 1000000),
"group_by_two_level_threshold_bytes": threshold_generator(
0.2, 0.2, 1, 50000000


@ -1454,9 +1454,9 @@ class ClickHouseCluster:
def setup_azurite_cmd(self, instance, env_variables, docker_compose_yml_dir):
self.with_azurite = True
env_variables["AZURITE_PORT"] = str(self.azurite_port)
env_variables[
"AZURITE_STORAGE_ACCOUNT_URL"
] = f"http://azurite1:{env_variables['AZURITE_PORT']}/devstoreaccount1"
env_variables["AZURITE_STORAGE_ACCOUNT_URL"] = (
f"http://azurite1:{env_variables['AZURITE_PORT']}/devstoreaccount1"
)
env_variables["AZURITE_CONNECTION_STRING"] = (
f"DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;"
f"AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;"
@ -1653,9 +1653,9 @@ class ClickHouseCluster:
# Code coverage files will be placed in database directory
# (affect only WITH_COVERAGE=1 build)
env_variables[
"LLVM_PROFILE_FILE"
] = "/var/lib/clickhouse/server_%h_%p_%m.profraw"
env_variables["LLVM_PROFILE_FILE"] = (
"/var/lib/clickhouse/server_%h_%p_%m.profraw"
)
clickhouse_start_command = CLICKHOUSE_START_COMMAND
if clickhouse_log_file:
@ -1668,9 +1668,9 @@ class ClickHouseCluster:
cluster=self,
base_path=self.base_dir,
name=name,
base_config_dir=base_config_dir
if base_config_dir
else self.base_config_dir,
base_config_dir=(
base_config_dir if base_config_dir else self.base_config_dir
),
custom_main_configs=main_configs or [],
custom_user_configs=user_configs or [],
custom_dictionaries=dictionaries or [],


@ -19,9 +19,9 @@ def cluster():
cluster = ClickHouseCluster(__file__)
cluster.add_instance(
"node",
main_configs=["configs/storage_arm.xml"]
if is_arm()
else ["configs/storage_amd.xml"],
main_configs=(
["configs/storage_arm.xml"] if is_arm() else ["configs/storage_amd.xml"]
),
with_minio=True,
with_hdfs=not is_arm(),
)


@ -5,6 +5,7 @@ in this test we write into per-node tables and read from the distributed table.
The default database in the distributed table definition is left empty on purpose to test
default database deduction.
"""
import pytest
from helpers.client import QueryRuntimeException


@ -2,6 +2,7 @@
This test makes sure interserver cluster queries handle invalid DNS
records for replicas.
"""
from helpers.client import QueryRuntimeException
from helpers.cluster import ClickHouseCluster, ClickHouseInstance


@ -197,7 +197,9 @@ def test_partition_by_string_column(started_cluster):
started_cluster, bucket, "test_foo/bar.csv"
)
assert '3,"йцук"\n' == get_s3_file_content(started_cluster, bucket, "test_йцук.csv")
assert '78,"你好"\n' == get_s3_file_content(started_cluster, bucket, "test_你好.csv")
assert '78,"你好"\n' == get_s3_file_content(
started_cluster, bucket, "test_你好.csv"
)
def test_partition_by_const_column(started_cluster):


@ -1,4 +1,5 @@
"""Test HTTP responses given by the TCP Handler."""
from pathlib import Path
import pytest
from helpers.cluster import ClickHouseCluster


@ -1,4 +1,5 @@
"""Test Interserver responses on configured IP."""
from pathlib import Path
import pytest
from helpers.cluster import ClickHouseCluster


@ -50,7 +50,7 @@ TYPES = {
"UInt32": {"bits": 32, "sign": False, "float": False},
"Int32": {"bits": 32, "sign": True, "float": False},
"UInt64": {"bits": 64, "sign": False, "float": False},
"Int64": {"bits": 64, "sign": True, "float": False}
"Int64": {"bits": 64, "sign": True, "float": False},
# "Float32" : { "bits" : 32, "sign" : True, "float" : True },
# "Float64" : { "bits" : 64, "sign" : True, "float" : True }
}