Mirror of https://github.com/ClickHouse/ClickHouse.git, synced 2024-11-24 16:42:05 +00:00
add support for new release branch
Automatic style fix

parent f8e71707f2
commit 1050217445
.github/workflows/create_release.yml (vendored): 25 changed lines
@@ -15,9 +15,8 @@ concurrency:
         required: true
         type: choice
         options:
-          # TODO:
-          #- new
           - patch
+          - new
       dry-run:
         description: 'Dry run'
         required: false
@@ -28,7 +27,6 @@ jobs:
   CreateRelease:
     env:
       GH_TOKEN: ${{ secrets.ROBOT_CLICKHOUSE_COMMIT_TOKEN }}
-      RELEASE_TYPE:
     runs-on: [self-hosted, release-maker]
     steps:
       - name: DebugInfo
@@ -61,11 +59,16 @@ jobs:
           echo "RELEASE_TAG=$release_tag" >> "$GITHUB_ENV"
           echo "COMMIT_SHA=$commit_sha" >> "$GITHUB_ENV"
       - name: Download All Release Artifacts
+        if: ${{ inputs.type == 'patch' }}
         run: |
           python3 ./tests/ci/create_release.py --infile "$RELEASE_INFO_FILE" --download-packages ${{ inputs.dry-run && '--dry-run' || '' }}
       - name: Push Git Tag for the Release
         run: |
           python3 ./tests/ci/create_release.py --push-release-tag --infile "$RELEASE_INFO_FILE" ${{ inputs.dry-run && '--dry-run' || '' }}
+      - name: Push New Release Branch
+        if: ${{ inputs.type == 'new' }}
+        run: |
+          python3 ./tests/ci/create_release.py --push-new-release-branch --infile "$RELEASE_INFO_FILE" ${{ inputs.dry-run && '--dry-run' || '' }}
       - name: Bump CH Version and Update Contributors' List
         run: |
           python3 ./tests/ci/create_release.py --create-bump-version-pr --infile "$RELEASE_INFO_FILE" ${{ inputs.dry-run && '--dry-run' || '' }}
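
Note on the `${{ inputs.dry-run && '--dry-run' || '' }}` expressions used in these steps (and the `${{ ! inputs.dry-run && '--push' || '' }}` variant further down): GitHub Actions expressions have no ternary operator, so the `&&`/`||` idiom appends a flag only when the `dry-run` input is set. A minimal Python sketch of the same flag assembly, for illustration only:

    def release_cli_flags(dry_run: bool, want_push: bool) -> str:
        # Mirrors the workflow expressions: add --dry-run on dry runs, --push otherwise.
        parts = []
        if dry_run:
            parts.append("--dry-run")
        if want_push and not dry_run:
            parts.append("--push")
        return " ".join(parts)

    print(release_cli_flags(dry_run=True, want_push=True))   # --dry-run
    print(release_cli_flags(dry_run=False, want_push=True))  # --push
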
@@ -73,6 +76,7 @@ jobs:
         run: |
           git checkout master
       - name: Bump Docker versions, Changelog, Security
+        if: ${{ inputs.type == 'patch' }}
         run: |
           [ "$(git branch --show-current)" != "master" ] && echo "not on the master" && exit 1
           echo "List versions"
@@ -90,8 +94,8 @@ jobs:
           echo "Generate Security"
           python3 ./utils/security-generator/generate_security.py > SECURITY.md
           git diff HEAD
-      - name: Create ChangeLog Pull Request
-        if: ${{ ! inputs.dry-run }}
+      - name: Generate ChangeLog
+        if: ${{ inputs.type == 'patch' && ! inputs.dry-run }}
         uses: peter-evans/create-pull-request@v6
         with:
           author: "robot-clickhouse <robot-clickhouse@users.noreply.github.com>"
@@ -115,39 +119,48 @@ jobs:
         run: |
           git checkout "$GITHUB_REF_NAME"
       - name: Create GH Release
+        if: ${{ inputs.type == 'patch' }}
         run: |
           python3 ./tests/ci/create_release.py --create-gh-release \
             --infile ${{ env.RELEASE_INFO_FILE }} ${{ inputs.dry-run && '--dry-run' || '' }}
 
       - name: Export TGZ Packages
+        if: ${{ inputs.type == 'patch' }}
         run: |
           python3 ./tests/ci/artifactory.py --export-tgz --infile ${{ env.RELEASE_INFO_FILE }} ${{ inputs.dry-run && '--dry-run' || '' }}
       - name: Test TGZ Packages
+        if: ${{ inputs.type == 'patch' }}
         run: |
           python3 ./tests/ci/artifactory.py --test-tgz --infile ${{ env.RELEASE_INFO_FILE }} ${{ inputs.dry-run && '--dry-run' || '' }}
       - name: Export RPM Packages
+        if: ${{ inputs.type == 'patch' }}
         run: |
           python3 ./tests/ci/artifactory.py --export-rpm --infile ${{ env.RELEASE_INFO_FILE }} ${{ inputs.dry-run && '--dry-run' || '' }}
       - name: Test RPM Packages
+        if: ${{ inputs.type == 'patch' }}
         run: |
           python3 ./tests/ci/artifactory.py --test-rpm --infile ${{ env.RELEASE_INFO_FILE }} ${{ inputs.dry-run && '--dry-run' || '' }}
       - name: Export Debian Packages
+        if: ${{ inputs.type == 'patch' }}
         run: |
           python3 ./tests/ci/artifactory.py --export-debian --infile ${{ env.RELEASE_INFO_FILE }} ${{ inputs.dry-run && '--dry-run' || '' }}
       - name: Test Debian Packages
+        if: ${{ inputs.type == 'patch' }}
         run: |
           python3 ./tests/ci/artifactory.py --test-debian --infile ${{ env.RELEASE_INFO_FILE }} ${{ inputs.dry-run && '--dry-run' || '' }}
       - name: Docker clickhouse/clickhouse-server building
+        if: ${{ inputs.type == 'patch' }}
         run: |
           cd "./tests/ci"
           export CHECK_NAME="Docker server image"
           python3 docker_server.py --release-type auto --version ${{ env.RELEASE_TAG }} --check-name "$CHECK_NAME" --sha ${{ env.COMMIT_SHA }} ${{ ! inputs.dry-run && '--push' || '' }}
       - name: Docker clickhouse/clickhouse-keeper building
+        if: ${{ inputs.type == 'patch' }}
         run: |
           cd "./tests/ci"
           export CHECK_NAME="Docker keeper image"
           python3 docker_server.py --release-type auto --version ${{ env.RELEASE_TAG }} --check-name "$CHECK_NAME" --sha ${{ env.COMMIT_SHA }} ${{ ! inputs.dry-run && '--push' || '' }}
       - name: Post Slack Message
-        if: failure()
+        if: always()
         run: |
           echo Slack Message
@@ -27,19 +27,19 @@ def run_fuzzer(fuzzer: str):
     parser.read(path)
 
     if parser.has_section("asan"):
-        os.environ[
-            "ASAN_OPTIONS"
-        ] = f"{os.environ['ASAN_OPTIONS']}:{':'.join('%s=%s' % (key, value) for key, value in parser['asan'].items())}"
+        os.environ["ASAN_OPTIONS"] = (
+            f"{os.environ['ASAN_OPTIONS']}:{':'.join('%s=%s' % (key, value) for key, value in parser['asan'].items())}"
+        )
 
     if parser.has_section("msan"):
-        os.environ[
-            "MSAN_OPTIONS"
-        ] = f"{os.environ['MSAN_OPTIONS']}:{':'.join('%s=%s' % (key, value) for key, value in parser['msan'].items())}"
+        os.environ["MSAN_OPTIONS"] = (
+            f"{os.environ['MSAN_OPTIONS']}:{':'.join('%s=%s' % (key, value) for key, value in parser['msan'].items())}"
+        )
 
     if parser.has_section("ubsan"):
-        os.environ[
-            "UBSAN_OPTIONS"
-        ] = f"{os.environ['UBSAN_OPTIONS']}:{':'.join('%s=%s' % (key, value) for key, value in parser['ubsan'].items())}"
+        os.environ["UBSAN_OPTIONS"] = (
+            f"{os.environ['UBSAN_OPTIONS']}:{':'.join('%s=%s' % (key, value) for key, value in parser['ubsan'].items())}"
+        )
 
     if parser.has_section("libfuzzer"):
        custom_libfuzzer_options = " ".join(
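
The hunk above is part of the automatic style fix noted in the commit message; behaviour is unchanged: each sanitizer section of the fuzzer options file is still folded into the matching *_OPTIONS environment variable. A standalone sketch of that pattern (the "fuzzer.options" file name here is hypothetical):

    import configparser
    import os

    parser = configparser.ConfigParser()
    parser.read("fuzzer.options")  # e.g. an [asan] section containing abort_on_error=1

    os.environ.setdefault("ASAN_OPTIONS", "detect_leaks=1")
    if parser.has_section("asan"):
        extra = ":".join("%s=%s" % (key, value) for key, value in parser["asan"].items())
        os.environ["ASAN_OPTIONS"] = f"{os.environ['ASAN_OPTIONS']}:{extra}"

    print(os.environ["ASAN_OPTIONS"])
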
@@ -5,6 +5,7 @@ import os
 import subprocess
 
 from contextlib import contextmanager
+from copy import copy
 from pathlib import Path
 from typing import Iterator, List
 
@@ -12,6 +13,7 @@ from git_helper import Git, GIT_PREFIX, Runner
 from ssh import SSHAgent
 from env_helper import GITHUB_REPOSITORY, S3_BUILDS_BUCKET
 from s3_helper import S3Helper
+from autoscale_runners_lambda.lambda_shared.pr import Labels
 from version_helper import (
     FILE_WITH_VERSION_PATH,
     GENERATED_CONTRIBUTORS,
@@ -19,6 +21,7 @@ from version_helper import (
     get_version_from_repo,
     update_cmake_version,
     update_contributors,
+    VersionType,
 )
 from git_helper import git_runner as runner
 from ci_config import CI
@@ -30,7 +33,12 @@ CONTRIBUTORS_PATH = get_abs_path(GENERATED_CONTRIBUTORS)
 class ShellRunner:
 
     @classmethod
-    def run(cls, command, check_retcode=True, print_output=True, async_=False):
+    def run(
+        cls, command, check_retcode=True, print_output=True, async_=False, dry_run=False
+    ):
+        if dry_run:
+            print(f"Dry-run: Would run shell command: [{command}]")
+            return 0, ""
         print(f"Running shell command: [{command}]")
         if async_:
             subprocess.Popen(command.split(" "))
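
The new `dry_run` parameter returns before anything is executed, which is what lets the whole workflow run end to end with `dry-run: true`. A condensed, self-contained sketch of the same idea (not the exact ShellRunner class from this file):

    import subprocess


    class DryRunShell:
        """Illustrative dry-run-aware shell runner."""

        @classmethod
        def run(cls, command: str, dry_run: bool = False):
            if dry_run:
                print(f"Dry-run: Would run shell command: [{command}]")
                return 0, ""
            print(f"Running shell command: [{command}]")
            result = subprocess.run(command, shell=True, capture_output=True, text=True)
            return result.returncode, result.stdout


    DryRunShell.run("git --version", dry_run=True)  # prints the command, executes nothing
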
@@ -73,23 +81,31 @@ class ReleaseInfo:
         release_branch = None
         release_tag = None
         codename = None
-        assert release_type in ("patch",)
-        # if release_type == "new":
-        #     assert False, "TODO"
-        #     git = Git()
-        #     version = get_version_from_repo(git=git)
-        #     assert runner.check_command(
-        #         f"git merge-base --is-ancestor {commit_ref} origin/master"
-        #     )
-        #     expected_tag = f"v{version.major}.{version.minor}-new"
-        #     assert (
-        #         git.latest_tag == expected_tag
-        #     ), f"BUG: latest tag [{git.latest_tag}], expected [{expected_tag}]"
-        #     release_branch = "master"
+        assert release_type in ("patch", "new")
+        if release_type == "new":
+            # check commit_ref is right and on a right branch
+            ShellRunner.run(
+                f"git merge-base --is-ancestor origin/{commit_ref} origin/master"
+            )
+            with checkout(commit_ref):
+                commit_sha = Runner().run(f"git rev-parse {commit_ref}")
+                # Git() must be inside "with checkout" contextmanager
+                git = Git()
+                version = get_version_from_repo(git=git)
+                release_branch = "master"
+                expected_prev_tag = f"v{version.major}.{version.minor}.1.1-new"
+                version.bump().with_description(VersionType.NEW)
+                assert (
+                    git.latest_tag == expected_prev_tag
+                ), f"BUG: latest tag [{git.latest_tag}], expected [{expected_prev_tag}]"
+                release_tag = version.describe
+                codename = (
+                    VersionType.STABLE
+                )  # dummy value (artifactory won't be updated for new release)
         if release_type == "patch":
             with checkout(commit_ref):
-                # Git() must be inside "with checkout" contextmanager
                 commit_sha = Runner().run(f"git rev-parse {commit_ref}")
+                # Git() must be inside "with checkout" contextmanager
                 git = Git()
                 version = get_version_from_repo(git=git)
                 codename = version.get_stable_release_type()
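
The ancestry check above is what restricts a "new" release to a commit that is already on origin/master (the patch path applies the same check against the release branch in the next hunk): `git merge-base --is-ancestor A B` exits 0 when A is an ancestor of B and non-zero otherwise. A small sketch of that check in isolation:

    import subprocess


    def is_ancestor(commit_ref: str, branch: str = "origin/master") -> bool:
        """True if commit_ref is already contained in branch (exit code 0)."""
        result = subprocess.run(
            ["git", "merge-base", "--is-ancestor", commit_ref, branch], check=False
        )
        return result.returncode == 0


    # Usage inside a git checkout, e.g. guarding a release tag:
    # if not is_ancestor("HEAD"):
    #     raise RuntimeError("commit is not on origin/master")
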
@@ -97,11 +113,16 @@ class ReleaseInfo:
                 release_branch = f"{version.major}.{version.minor}"
                 release_tag = version.describe
                 runner.run(f"{GIT_PREFIX} fetch origin {release_branch} --tags")
-                assert runner.check_command(
+                # check commit is right and on a right branch
+                ShellRunner.run(
                     f"git merge-base --is-ancestor {commit_ref} origin/{release_branch}"
                 )
                 if version.patch == 1:
-                    expected_tag_prefix = f"v{version.major}.{version.minor+1}-"
+                    expected_version = copy(version)
+                    expected_version.bump()
+                    expected_tag_prefix = (
+                        f"v{expected_version.major}.{expected_version.minor}-"
+                    )
                     expected_tag_suffix = "-new"
                 else:
                     expected_tag_prefix = (
@@ -157,16 +178,71 @@ class ReleaseInfo:
             print("Dry run, would execute:")
             print(f"* {cmd_push_tag}")
 
+    @staticmethod
+    def _create_gh_label(label: str, color_hex: str, dry_run: bool):
+        cmd = f"gh api repos/{GITHUB_REPOSITORY}/labels -f name={label} -f color={color_hex}"
+        ShellRunner.run(cmd, dry_run=dry_run)
+
+    def push_new_release_branch(self, dry_run: bool) -> None:
+        assert (
+            self.release_branch == "master"
+        ), "New release branch can be created only for release type [new]"
+        git = Git()
+        version = get_version_from_repo(git=git)
+        new_release_branch = f"{version.major}.{version.minor}"
+        stable_release_type = version.get_stable_release_type()
+        version_after_release = copy(version)
+        version_after_release.bump()
+        assert (
+            version_after_release.string == self.version
+        ), f"Unexpected current version in git, must precede [{self.version}] by one step, actual [{version.string}]"
+        if dry_run:
+            # remove locally created branch from prev run
+            ShellRunner.run(
+                f"{GIT_PREFIX} branch -l | grep -q {new_release_branch} && git branch -d {new_release_branch} ||:"
+            )
+        print(
+            f"Create and push new release branch [{new_release_branch}], commit [{self.commit_sha}]"
+        )
+        with checkout(self.release_branch):
+            with checkout_new(new_release_branch):
+                pr_labels = f"--label {Labels.RELEASE}"
+                if stable_release_type == VersionType.LTS:
+                    pr_labels += f" --label {Labels.RELEASE_LTS}"
+                cmd_push_branch = (
+                    f"{GIT_PREFIX} push --set-upstream origin {new_release_branch}"
+                )
+                ShellRunner.run(cmd_push_branch, dry_run=dry_run)
+
+        print("Create and push backport tags for new release branch")
+        ReleaseInfo._create_gh_label(
+            f"v{new_release_branch}-must-backport", "10dbed", dry_run=dry_run
+        )
+        ReleaseInfo._create_gh_label(
+            f"v{new_release_branch}-affected", "c2bfff", dry_run=dry_run
+        )
+        ShellRunner.run(
+            f"""gh pr create --repo {GITHUB_REPOSITORY} --title 'Release pull request for branch {new_release_branch}'
+            --head {new_release_branch} {pr_labels}
+            --body 'This PullRequest is a part of ClickHouse release cycle. It is used by CI system only. Do not perform any changes with it.'
+            """,
+            dry_run=dry_run,
+        )
+
     def update_version_and_contributors_list(self, dry_run: bool) -> None:
         # Bump version, update contributors list, create PR
         branch_upd_version_contributors = f"bump_version_{self.version}"
         with checkout(self.commit_sha):
             git = Git()
             version = get_version_from_repo(git=git)
-            version.with_description(version.get_stable_release_type())
+            if self.release_branch == "master":
+                version.bump()
+                version.with_description(VersionType.TESTING)
+            else:
+                version.with_description(version.get_stable_release_type())
             assert (
                 version.string == self.version
-            ), "BUG: version in release info does not match version in git commit"
+            ), f"BUG: version in release info does not match version in git commit, expected [{self.version}], got [{version.string}]"
         with checkout(self.release_branch):
             with checkout_new(branch_upd_version_contributors):
                 update_cmake_version(version)
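
The backport labels above are created through the `gh` CLI's generic REST wrapper (`gh api repos/<owner>/<repo>/labels`), routed through ShellRunner.run so that dry runs only print the command. A minimal sketch of that helper on its own, with a placeholder repository and label name:

    import subprocess

    REPO = "example-org/example-repo"  # placeholder, not taken from the diff


    def create_gh_label(label: str, color_hex: str, dry_run: bool) -> None:
        cmd = f"gh api repos/{REPO}/labels -f name={label} -f color={color_hex}"
        if dry_run:
            print(f"Dry-run: Would run: [{cmd}]")
            return
        subprocess.run(cmd, shell=True, check=True)


    create_gh_label("v24.8-must-backport", "10dbed", dry_run=True)
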
@@ -430,6 +506,11 @@ def parse_args() -> argparse.Namespace:
         action="store_true",
         help="Creates and pushes git tag",
     )
+    parser.add_argument(
+        "--push-new-release-branch",
+        action="store_true",
+        help="Creates and pushes new release branch and corresponding service gh tags for backports",
+    )
     parser.add_argument(
         "--create-bump-version-pr",
         action="store_true",
@@ -533,6 +614,10 @@ if __name__ == "__main__":
         assert args.infile, "--infile <release info file path> must be provided"
         release_info = ReleaseInfo.from_file(args.infile)
         release_info.push_release_tag(dry_run=args.dry_run)
+    if args.push_new_release_branch:
+        assert args.infile, "--infile <release info file path> must be provided"
+        release_info = ReleaseInfo.from_file(args.infile)
+        release_info.push_new_release_branch(dry_run=args.dry_run)
     if args.create_bump_version_pr:
         # TODO: store link to PR in release info
         assert args.infile, "--infile <release info file path> must be provided"
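
Together with the parse_args() hunk above, the dispatch follows one pattern per action: each flag is independent, requires --infile, and forwards --dry-run. A condensed sketch of that shape (not the full script):

    import argparse


    def parse_args() -> argparse.Namespace:
        parser = argparse.ArgumentParser(description="release helper (condensed sketch)")
        parser.add_argument("--infile", type=str, help="path to release info file")
        parser.add_argument("--dry-run", action="store_true")
        parser.add_argument("--push-release-tag", action="store_true")
        parser.add_argument("--push-new-release-branch", action="store_true")
        return parser.parse_args()


    if __name__ == "__main__":
        args = parse_args()
        if args.push_new_release_branch:
            assert args.infile, "--infile <release info file path> must be provided"
            print(f"would push a new release branch (dry_run={args.dry_run})")
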
@@ -563,7 +648,7 @@ if __name__ == "__main__":
 
 
 """
-Prepare release machine (for arm machine):
+Prepare release machine:
 
 ### INSTALL PACKAGES
 sudo apt update
@@ -37,9 +37,9 @@ class SSHAgent:
         ssh_options = (
             "," + os.environ["SSH_OPTIONS"] if os.environ.get("SSH_OPTIONS") else ""
         )
-        os.environ[
-            "SSH_OPTIONS"
-        ] = f"{ssh_options}UserKnownHostsFile=/dev/null,StrictHostKeyChecking=no"
+        os.environ["SSH_OPTIONS"] = (
+            f"{ssh_options}UserKnownHostsFile=/dev/null,StrictHostKeyChecking=no"
+        )
 
     def add(self, key):
         key_pub = self._key_pub(key)
@@ -172,14 +172,10 @@ class TestCIOptions(unittest.TestCase):
             job: CI.JobConfig(runner_type=CI.Runners.STYLE_CHECKER)
             for job in _TEST_JOB_LIST
         }
-        jobs_configs[
-            "fuzzers"
-        ].run_by_label = (
+        jobs_configs["fuzzers"].run_by_label = (
             "TEST_LABEL"  # check "fuzzers" appears in the result due to the label
         )
-        jobs_configs[
-            "Integration tests (asan)"
-        ].release_only = (
+        jobs_configs["Integration tests (asan)"].release_only = (
             True  # still must be included as it's set with include keywords
         )
         filtered_jobs = list(
@@ -311,9 +307,9 @@ class TestCIOptions(unittest.TestCase):
             job: CI.JobConfig(runner_type=CI.Runners.STYLE_CHECKER)
             for job in _TEST_JOB_LIST
         }
-        jobs_configs[
-            "fuzzers"
-        ].run_by_label = "TEST_LABEL"  # check "fuzzers" does not appears in the result
+        jobs_configs["fuzzers"].run_by_label = (
+            "TEST_LABEL"  # check "fuzzers" does not appears in the result
+        )
         jobs_configs["Integration tests (asan)"].release_only = True
         filtered_jobs = list(
             ci_options.apply(
@@ -72,6 +72,19 @@ class ClickHouseVersion:
             return self.patch_update()
         raise KeyError(f"wrong part {part} is used")
 
+    def bump(self) -> "ClickHouseVersion":
+        if self.minor < 12:
+            self._minor += 1
+            self._revision += 1
+            self._patch = 1
+            self._tweak = 1
+        else:
+            self._major += 1
+            self._revision += 1
+            self._patch = 1
+            self._tweak = 1
+        return self
+
     def major_update(self) -> "ClickHouseVersion":
         if self._git is not None:
             self._git.update()
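
The new bump() encodes the release numbering rule: while the minor number is below 12 the next release increments the minor, otherwise the major is incremented; in both cases the revision grows and patch/tweak reset to 1 (the hunk does not reset the minor in the major branch). A self-contained illustration of that rule with a toy class:

    from dataclasses import dataclass


    @dataclass
    class Version:
        """Toy stand-in for ClickHouseVersion, only to illustrate the bump rule."""

        major: int
        minor: int
        patch: int = 1
        tweak: int = 1
        revision: int = 0

        def bump(self) -> "Version":
            if self.minor < 12:
                self.minor += 1
            else:
                self.major += 1  # as in the hunk above, minor is left as-is here
            self.revision += 1
            self.patch = 1
            self.tweak = 1
            return self


    print(Version(24, 6).bump())   # Version(major=24, minor=7, patch=1, tweak=1, revision=1)
    print(Version(24, 12).bump())  # Version(major=25, minor=12, patch=1, tweak=1, revision=1)
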
@@ -1454,9 +1454,9 @@ class ClickHouseCluster:
     def setup_azurite_cmd(self, instance, env_variables, docker_compose_yml_dir):
         self.with_azurite = True
         env_variables["AZURITE_PORT"] = str(self.azurite_port)
-        env_variables[
-            "AZURITE_STORAGE_ACCOUNT_URL"
-        ] = f"http://azurite1:{env_variables['AZURITE_PORT']}/devstoreaccount1"
+        env_variables["AZURITE_STORAGE_ACCOUNT_URL"] = (
+            f"http://azurite1:{env_variables['AZURITE_PORT']}/devstoreaccount1"
+        )
         env_variables["AZURITE_CONNECTION_STRING"] = (
             f"DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;"
             f"AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;"
@@ -1653,9 +1653,9 @@ class ClickHouseCluster:
 
         # Code coverage files will be placed in database directory
         # (affect only WITH_COVERAGE=1 build)
-        env_variables[
-            "LLVM_PROFILE_FILE"
-        ] = "/var/lib/clickhouse/server_%h_%p_%m.profraw"
+        env_variables["LLVM_PROFILE_FILE"] = (
+            "/var/lib/clickhouse/server_%h_%p_%m.profraw"
+        )
 
         clickhouse_start_command = CLICKHOUSE_START_COMMAND
         if clickhouse_log_file:
@@ -1668,9 +1668,9 @@ class ClickHouseCluster:
             cluster=self,
             base_path=self.base_dir,
             name=name,
-            base_config_dir=base_config_dir
-            if base_config_dir
-            else self.base_config_dir,
+            base_config_dir=(
+                base_config_dir if base_config_dir else self.base_config_dir
+            ),
             custom_main_configs=main_configs or [],
             custom_user_configs=user_configs or [],
             custom_dictionaries=dictionaries or [],
@@ -19,9 +19,9 @@ def cluster():
     cluster = ClickHouseCluster(__file__)
     cluster.add_instance(
         "node",
-        main_configs=["configs/storage_arm.xml"]
-        if is_arm()
-        else ["configs/storage_amd.xml"],
+        main_configs=(
+            ["configs/storage_arm.xml"] if is_arm() else ["configs/storage_amd.xml"]
+        ),
         with_minio=True,
         with_hdfs=not is_arm(),
     )
@@ -5,6 +5,7 @@ in this test we write into per-node tables and read from the distributed table.
 The default database in the distributed table definition is left empty on purpose to test
 default database deduction.
 """
+
 import pytest
 
 from helpers.client import QueryRuntimeException
@@ -2,6 +2,7 @@
 This test makes sure interserver cluster queries handle invalid DNS
 records for replicas.
 """
+
 from helpers.client import QueryRuntimeException
 from helpers.cluster import ClickHouseCluster, ClickHouseInstance
 
@@ -197,7 +197,9 @@ def test_partition_by_string_column(started_cluster):
         started_cluster, bucket, "test_foo/bar.csv"
     )
     assert '3,"йцук"\n' == get_s3_file_content(started_cluster, bucket, "test_йцук.csv")
-    assert '78,"你好"\n' == get_s3_file_content(started_cluster, bucket, "test_你好.csv")
+    assert '78,"你好"\n' == get_s3_file_content(
+        started_cluster, bucket, "test_你好.csv"
+    )
 
 
 def test_partition_by_const_column(started_cluster):
@@ -1,4 +1,5 @@
 """Test HTTP responses given by the TCP Handler."""
+
 from pathlib import Path
 import pytest
 from helpers.cluster import ClickHouseCluster
@@ -1,4 +1,5 @@
 """Test Interserver responses on configured IP."""
+
 from pathlib import Path
 import pytest
 from helpers.cluster import ClickHouseCluster
@@ -50,7 +50,7 @@ TYPES = {
     "UInt32": {"bits": 32, "sign": False, "float": False},
     "Int32": {"bits": 32, "sign": True, "float": False},
     "UInt64": {"bits": 64, "sign": False, "float": False},
-    "Int64": {"bits": 64, "sign": True, "float": False}
+    "Int64": {"bits": 64, "sign": True, "float": False},
     # "Float32" : { "bits" : 32, "sign" : True, "float" : True },
     # "Float64" : { "bits" : 64, "sign" : True, "float" : True }
 }