Merge pull request #32218 from ClickHouse/release-workflow

Create release workflow
This commit is contained in:
Mikhail f. Shiryaev 2022-01-19 16:21:57 +01:00 committed by GitHub
commit 4cb86ad616
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
3 changed files with 281 additions and 157 deletions

121
.github/workflows/docs_release.yml vendored Normal file
View File

@ -0,0 +1,121 @@
# DocsReleaseChecks: rebuilds Docker images and publishes the documentation
# whenever docs-related paths change on master (or on manual dispatch).
# Indentation restored to the conventional 2-space GitHub-Actions layout;
# the extracted source had all leading whitespace stripped.
name: DocsReleaseChecks
env:
  # Force the stdout and stderr streams to be unbuffered
  PYTHONUNBUFFERED: 1
concurrency:
  # Only one docs release at a time; a newer run cancels the older one
  group: master-release
  cancel-in-progress: true
on: # yamllint disable-line rule:truthy
  push:
    branches:
      - master
    paths:
      - 'docs/**'
      - 'website/**'
      - 'benchmark/**'
      - 'docker/**'
      - '.github/**'
  workflow_dispatch:
jobs:
  DockerHubPushAarch64:
    runs-on: [self-hosted, func-tester-aarch64]
    steps:
      - name: Clear repository
        run: |
          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Images check
        run: |
          cd "$GITHUB_WORKSPACE/tests/ci"
          python3 docker_images_check.py --suffix aarch64
      - name: Upload images files to artifacts
        uses: actions/upload-artifact@v2
        with:
          name: changed_images_aarch64
          path: ${{ runner.temp }}/docker_images_check/changed_images_aarch64.json
  DockerHubPushAmd64:
    runs-on: [self-hosted, style-checker]
    steps:
      - name: Clear repository
        run: |
          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Images check
        run: |
          cd "$GITHUB_WORKSPACE/tests/ci"
          python3 docker_images_check.py --suffix amd64
      - name: Upload images files to artifacts
        uses: actions/upload-artifact@v2
        with:
          name: changed_images_amd64
          path: ${{ runner.temp }}/docker_images_check/changed_images_amd64.json
  DockerHubPush:
    # Merges the per-arch image lists produced by the two jobs above
    needs: [DockerHubPushAmd64, DockerHubPushAarch64]
    runs-on: [self-hosted, style-checker]
    steps:
      - name: Clear repository
        run: |
          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Download changed aarch64 images
        uses: actions/download-artifact@v2
        with:
          name: changed_images_aarch64
          path: ${{ runner.temp }}
      - name: Download changed amd64 images
        uses: actions/download-artifact@v2
        with:
          name: changed_images_amd64
          path: ${{ runner.temp }}
      - name: Images check
        run: |
          cd "$GITHUB_WORKSPACE/tests/ci"
          python3 docker_manifests_merge.py --suffix amd64 --suffix aarch64
      - name: Upload images files to artifacts
        uses: actions/upload-artifact@v2
        with:
          name: changed_images
          path: ${{ runner.temp }}/changed_images.json
  DocsRelease:
    needs: DockerHubPush
    runs-on: [self-hosted, func-tester]
    steps:
      - name: Set envs
        # https://docs.github.com/en/actions/learn-github-actions/workflow-commands-for-github-actions#multiline-strings
        run: |
          cat >> "$GITHUB_ENV" << 'EOF'
          TEMP_PATH=${{runner.temp}}/docs_release
          REPO_COPY=${{runner.temp}}/docs_release/ClickHouse
          CLOUDFLARE_TOKEN=${{secrets.CLOUDFLARE}}
          ROBOT_CLICKHOUSE_SSH_KEY<<RCSK
          ${{secrets.ROBOT_CLICKHOUSE_SSH_KEY}}
          RCSK
          EOF
      - name: Clear repository
        run: |
          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Download changed images
        uses: actions/download-artifact@v2
        with:
          name: changed_images
          path: ${{ env.TEMP_PATH }}
      - name: Docs Release
        run: |
          sudo rm -fr "$TEMP_PATH"
          mkdir -p "$TEMP_PATH"
          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
          cd "$REPO_COPY/tests/ci"
          python3 docs_release.py
      - name: Cleanup
        if: always()
        run: |
          docker kill "$(docker ps -q)" ||:
          docker rm -f "$(docker ps -a -q)" ||:
          sudo rm -fr "$TEMP_PATH"

View File

@ -1,121 +1,39 @@
name: DocsReleaseChecks
name: ReleaseWorkflow
# - Gets artifacts from S3
# - Sends it to JFROG Artifactory
# - Adds them to the release assets
env:
# Force the stdout and stderr streams to be unbuffered
PYTHONUNBUFFERED: 1
concurrency:
group: master-release
cancel-in-progress: true
on: # yamllint disable-line rule:truthy
push:
branches:
- master
paths:
- 'docs/**'
- 'website/**'
- 'benchmark/**'
- 'docker/**'
- '.github/**'
workflow_dispatch:
release:
types:
- published
jobs:
DockerHubPushAarch64:
runs-on: [self-hosted, func-tester-aarch64]
steps:
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
- name: Images check
run: |
cd "$GITHUB_WORKSPACE/tests/ci"
python3 docker_images_check.py --suffix aarch64
- name: Upload images files to artifacts
uses: actions/upload-artifact@v2
with:
name: changed_images_aarch64
path: ${{ runner.temp }}/docker_images_check/changed_images_aarch64.json
DockerHubPushAmd64:
ReleasePublish:
runs-on: [self-hosted, style-checker]
steps:
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
- name: Images check
run: |
cd "$GITHUB_WORKSPACE/tests/ci"
python3 docker_images_check.py --suffix amd64
- name: Upload images files to artifacts
uses: actions/upload-artifact@v2
with:
name: changed_images_amd64
path: ${{ runner.temp }}/docker_images_check/changed_images_amd64.json
DockerHubPush:
needs: [DockerHubPushAmd64, DockerHubPushAarch64]
runs-on: [self-hosted, style-checker]
steps:
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
- name: Download changed aarch64 images
uses: actions/download-artifact@v2
with:
name: changed_images_aarch64
path: ${{ runner.temp }}
- name: Download changed amd64 images
uses: actions/download-artifact@v2
with:
name: changed_images_amd64
path: ${{ runner.temp }}
- name: Images check
run: |
cd "$GITHUB_WORKSPACE/tests/ci"
python3 docker_manifests_merge.py --suffix amd64 --suffix aarch64
- name: Upload images files to artifacts
uses: actions/upload-artifact@v2
with:
name: changed_images
path: ${{ runner.temp }}/changed_images.json
DocsRelease:
needs: DockerHubPush
runs-on: [self-hosted, func-tester]
steps:
- name: Set envs
# https://docs.github.com/en/actions/learn-github-actions/workflow-commands-for-github-actions#multiline-strings
run: |
cat >> "$GITHUB_ENV" << 'EOF'
TEMP_PATH=${{runner.temp}}/docs_release
REPO_COPY=${{runner.temp}}/docs_release/ClickHouse
CLOUDFLARE_TOKEN=${{secrets.CLOUDFLARE}}
ROBOT_CLICKHOUSE_SSH_KEY<<RCSK
${{secrets.ROBOT_CLICKHOUSE_SSH_KEY}}
RCSK
EOF
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
- name: Download changed images
uses: actions/download-artifact@v2
with:
name: changed_images
path: ${{ env.TEMP_PATH }}
- name: Docs Release
run: |
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
cd "$REPO_COPY/tests/ci"
python3 docs_release.py
- name: Cleanup
if: always()
run: |
docker kill "$(docker ps -q)" ||:
docker rm -f "$(docker ps -a -q)" ||:
sudo rm -fr "$TEMP_PATH"
# Steps of the ReleasePublish job (2-space indentation restored; this
# sequence nests under `jobs.ReleasePublish.steps`).
# Fix: the original had a stray, empty `env:` key between the
# "Download packages and push to Artifactory" step name and its `run:`.
# An empty env mapping configures nothing and reads as an unfinished edit;
# it is removed here. NOTE(review): confirm no env vars were meant there.
- name: Set envs
  run: |
    cat >> "$GITHUB_ENV" << 'EOF'
    JFROG_API_KEY=${{ secrets.JFROG_KEY_API_PACKAGES }}
    TEMP_PATH=${{runner.temp}}/release_packages
    REPO_COPY=${{runner.temp}}/release_packages/ClickHouse
    EOF
- name: Check out repository code
  uses: actions/checkout@v2
- name: Download packages and push to Artifactory
  run: |
    rm -rf "$TEMP_PATH" && mkdir -p "$REPO_COPY"
    cp -r "$GITHUB_WORKSPACE" "$REPO_COPY"
    cd "$REPO_COPY"
    python3 ./tests/ci/push_to_artifactory.py --release "${{ github.ref }}" \
      --commit '${{ github.sha }}' --all
- name: Upload packages to release assets
  uses: svenstaro/upload-release-action@v2
  with:
    repo_token: ${{ secrets.GITHUB_TOKEN }}
    file: ${{runner.temp}}/release_packages/*
    overwrite: true
    tag: ${{ github.ref }}
    file_glob: true

View File

@ -4,13 +4,27 @@ import argparse
import logging
import os
import re
from typing import Tuple
from artifactory import ArtifactorySaaSPath
from artifactory import ArtifactorySaaSPath # type: ignore
from build_download_helper import dowload_build_with_progress
# str.removeprefix/removesuffix exist only since Python 3.9 (PEP 616);
# these module-level polyfills keep the script working on Python 3.8.
def removeprefix(string: str, prefix: str) -> str:
    """Return ``string`` with a leading ``prefix`` removed, if present.

    Mirrors PEP 616 ``str.removeprefix``: when ``string`` does not start
    with ``prefix`` (or ``prefix`` is empty) the string is returned as is.
    """
    if string.startswith(prefix):
        return string[len(prefix) :]  # noqa: ignore E203, false positive
    return string
def removesuffix(string: str, suffix: str) -> str:
    """Return ``string`` with a trailing ``suffix`` removed, if present.

    Bug fix vs the original polyfill: for an empty suffix the original
    returned ``string[: -len("")]`` == ``string[:0]`` == ``""`` because
    ``"".endswith("")`` is True.  PEP 616 ``str.removesuffix`` returns the
    string unchanged in that case; the ``suffix and`` guard restores that.
    """
    if suffix and string.endswith(suffix):
        return string[: -len(suffix)]
    return string
# Necessary ENV variables
def getenv(name, default=None):
def getenv(name: str, default: str = None):
env = os.getenv(name, default)
if env is not None:
return env
@ -44,14 +58,16 @@ class Packages:
for name, arch in self.packages
)
self.tgz = tuple("{}-{}.tgz".format(name, version) for name, _ in self.packages)
def arch(self, deb_pkg: str) -> str:
if deb_pkg not in self.deb:
raise ValueError("{} not in {}".format(deb_pkg, self.deb))
return deb_pkg.removesuffix(".deb").split("_")[-1]
return removesuffix(deb_pkg, ".deb").split("_")[-1]
@staticmethod
def path(package):
return os.path.join(TEMP_PATH, package)
def path(package_file: str) -> str:
return os.path.join(TEMP_PATH, package_file)
class S3:
@ -76,6 +92,7 @@ class S3:
commit: str,
check_name: str,
version: str,
force_download: bool,
):
self._common = dict(
bucket_name=bucket_name,
@ -83,31 +100,40 @@ class S3:
commit=commit,
check_name=check_name,
)
self.force_download = force_download
self.packages = Packages(version)
def download_package(self, package):
url = self.template.format_map({**self._common, "package": package})
dowload_build_with_progress(url, Packages.path(package))
def download_package(self, package_file: str):
if not self.force_download and os.path.exists(Packages.path(package_file)):
return
url = self.template.format_map({**self._common, "package": package_file})
dowload_build_with_progress(url, Packages.path(package_file))
def download_deb(self):
for package in self.packages.deb:
self.download_package(package)
for package_file in self.packages.deb:
self.download_package(package_file)
def download_rpm(self):
for package in self.packages.rpm:
self.download_package(package)
for package_file in self.packages.rpm:
self.download_package(package_file)
def download_tgz(self):
for package_file in self.packages.tgz:
self.download_package(package_file)
class Release:
def __init__(self, name: str) -> str:
def __init__(self, name: str):
r = re.compile(r"^v\d{2}[.]\d+[.]\d+[.]\d+-(testing|prestable|stable|lts)$")
# Automatically remove refs/tags/ if full refname passed here
name = removeprefix(name, "refs/tags/")
if not r.match(name):
raise argparse.ArgumentTypeError(
"release name does not match "
f"release name {name} does not match "
"v12.1.2.15-(testing|prestable|stable|lts) pattern"
)
self._name = name
self._version = self._name.removeprefix("v")
self._version = removeprefix(self._name, "v")
self._version = self.version.split("-")[0]
self._version_parts = tuple(self.version.split("."))
self._type = self._name.split("-")[-1]
@ -117,7 +143,7 @@ class Release:
return self._version
@property
def version_parts(self) -> str:
def version_parts(self) -> Tuple[str, ...]:
return self._version_parts
@property
@ -126,37 +152,47 @@ class Release:
class Artifactory:
def __init__(self, url: str, release: str, deb_repo="deb", rpm_repo="rpm"):
def __init__(
self, url: str, release: str, deb_repo="deb", rpm_repo="rpm", tgz_repo="tgz"
):
self._url = url
self._release = release
self._deb_url = "/".join((self._url, deb_repo, "pool", self._release)) + "/"
self._rpm_url = "/".join((self._url, rpm_repo, self._release)) + "/"
self._tgz_url = "/".join((self._url, tgz_repo, self._release)) + "/"
# check the credentials ENVs for early exit
self.__path_helper("_deb", "")
def deploy_deb(self, packages: Packages):
for package in packages.deb:
path = packages.path(package)
for package_file in packages.deb:
path = packages.path(package_file)
dist = self._release
comp = "main"
arch = packages.arch(package)
arch = packages.arch(package_file)
logging.info(
"Deploy %s(distribution=%s;component=%s;architecture=%s) to artifactory",
"Deploy %s(distribution=%s;component=%s;architecture=%s) "
"to artifactory",
path,
dist,
comp,
arch,
)
self.deb(package).deploy_deb(path, dist, comp, arch)
self.deb_path(package_file).deploy_deb(path, dist, comp, arch)
def deploy_rpm(self, packages: Packages):
for package in packages.rpm:
path = packages.path(package)
for package_file in packages.rpm:
path = packages.path(package_file)
logging.info("Deploy %s to artifactory", path)
self.rpm(package).deploy_file(path)
self.rpm_path(package_file).deploy_file(path)
def __path_helper(self, name, package) -> ArtifactorySaaSPath:
url = "/".join((getattr(self, name + "_url"), package))
def deploy_tgz(self, packages: Packages):
for package_file in packages.tgz:
path = packages.path(package_file)
logging.info("Deploy %s to artifactory", path)
self.tgz_path(package_file).deploy_file(path)
def __path_helper(self, name: str, package_file: str) -> ArtifactorySaaSPath:
url = "/".join((getattr(self, name + "_url"), package_file))
path = None
if JFROG_API_KEY:
path = ArtifactorySaaSPath(url, apikey=JFROG_API_KEY)
@ -166,14 +202,17 @@ class Artifactory:
raise KeyError("Neither JFROG_API_KEY nor JFROG_TOKEN env are defined")
return path
def deb(self, package) -> ArtifactorySaaSPath:
return self.__path_helper("_deb", package)
def deb_path(self, package_file: str) -> ArtifactorySaaSPath:
return self.__path_helper("_deb", package_file)
def rpm(self, package) -> ArtifactorySaaSPath:
return self.__path_helper("_rpm", package)
def rpm_path(self, package_file: str) -> ArtifactorySaaSPath:
return self.__path_helper("_rpm", package_file)
def tgz_path(self, package_file: str) -> ArtifactorySaaSPath:
return self.__path_helper("_tgz", package_file)
def commit(name):
def commit(name: str):
r = re.compile(r"^([0-9]|[a-f]){40}$")
if not r.match(name):
raise argparse.ArgumentTypeError(
@ -193,7 +232,8 @@ def parse_args() -> argparse.Namespace:
"--release",
required=True,
type=Release,
help="release name, e.g. v12.13.14.15-prestable",
help="release name, e.g. v12.13.14.15-prestable; 'refs/tags/' "
"prefix is striped automatically",
)
parser.add_argument(
"--pull-request",
@ -216,6 +256,9 @@ def parse_args() -> argparse.Namespace:
help="check name, a part of bucket path, "
"will be converted to lower case with spaces->underscore",
)
parser.add_argument(
"--all", action="store_true", help="implies all deb, rpm and tgz"
)
parser.add_argument(
"--deb", action="store_true", help="if Debian packages should be processed"
)
@ -223,18 +266,56 @@ def parse_args() -> argparse.Namespace:
"--rpm", action="store_true", help="if RPM packages should be processed"
)
parser.add_argument(
"--artifactory-url", default="https://clickhousedb.jfrog.io/artifactory"
"--tgz",
action="store_true",
help="if tgz archives should be processed. They aren't pushed to artifactory",
)
parser.add_argument(
"--artifactory-url",
default="https://clickhousedb.jfrog.io/artifactory",
help="SaaS Artifactory url",
)
parser.add_argument(
"-n",
"--no-artifactory",
action="store_true",
help="do not push packages to artifactory",
)
parser.add_argument(
"--no-force-download",
action="store_true",
help="do not download packages again if they exist already",
)
args = parser.parse_args()
if not args.deb and not args.rpm:
parser.error("at least one of --deb and --rpm should be specified")
if args.all:
args.deb = args.rpm = args.tgz = True
if not (args.deb or args.rpm or args.tgz):
parser.error("at least one of --deb, --rpm or --tgz should be specified")
args.check_name = args.check_name.lower().replace(" ", "_")
if args.pull_request == 0:
args.pull_request = ".".join(args.release.version_parts[:2])
return args
def process_deb(s3: S3, art_client: Artifactory):
    """Download .deb packages from S3; deploy them when a client is given.

    ``art_client`` may be None (main() passes None under --no-artifactory),
    in which case only the download is performed.
    """
    s3.download_deb()
    if art_client is not None:
        art_client.deploy_deb(s3.packages)
def process_rpm(s3: S3, art_client: Artifactory):
    """Download .rpm packages from S3; deploy them when a client is given.

    ``art_client`` may be None (main() passes None under --no-artifactory),
    in which case only the download is performed.
    """
    s3.download_rpm()
    if art_client is not None:
        art_client.deploy_rpm(s3.packages)
def process_tgz(s3: S3, art_client: Artifactory):
    """Download .tgz archives from S3; deploy them when a client is given.

    ``art_client`` may be None (main() passes None under --no-artifactory),
    in which case only the download is performed.
    """
    s3.download_tgz()
    if art_client is not None:
        art_client.deploy_tgz(s3.packages)
def main():
logging.basicConfig(level=logging.INFO, format="%(asctime)s %(message)s")
args = parse_args()
@ -244,14 +325,18 @@ def main():
args.commit,
args.check_name,
args.release.version,
not args.no_force_download,
)
art_client = Artifactory(args.artifactory_url, args.release.type)
art_client = None
if not args.no_artifactory:
art_client = Artifactory(args.artifactory_url, args.release.type)
if args.deb:
s3.download_deb()
art_client.deploy_deb(s3.packages)
process_deb(s3, art_client)
if args.rpm:
s3.download_rpm()
art_client.deploy_rpm(s3.packages)
process_rpm(s3, art_client)
if args.tgz:
process_tgz(s3, art_client)
if __name__ == "__main__":