Merge pull request #46240 from ClickHouse/sccache

Use sccache and clang-tidy-cache with S3 cache backend

Commit 5076cdb7fb
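In short: the build no longer reads a bare -DENABLE_CCACHE toggle. A new COMPILER_CACHE CMake option selects the cache (auto, ccache, sccache, or disabled), and sccache plus clang-tidy-cache can keep their caches in an S3 bucket, so CI no longer has to download and re-upload a local ccache directory. A minimal sketch of the new configure surface, assuming sccache is installed and using a placeholder bucket name:

    # pick the compiler cache explicitly; "auto" tries ccache first, then sccache
    cmake -S . -B build -DCOMPILER_CACHE=sccache

    # sccache reads its S3 backend from the environment (values here are illustrative)
    export SCCACHE_BUCKET=my-build-cache
    export SCCACHE_S3_KEY_PREFIX=ccache/sccache
    cmake --build build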
@@ -568,7 +568,7 @@ if (NATIVE_BUILD_TARGETS
     COMMAND ${CMAKE_COMMAND}
         "-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}"
         "-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}"
-        "-DENABLE_CCACHE=${ENABLE_CCACHE}"
+        "-DCOMPILER_CACHE=${COMPILER_CACHE}"
         # Avoid overriding .cargo/config.toml with native toolchain.
         "-DENABLE_RUST=OFF"
         "-DENABLE_CLICKHOUSE_SELF_EXTRACTING=${ENABLE_CLICKHOUSE_SELF_EXTRACTING}"
@@ -1,5 +1,6 @@
 # Setup integration with ccache to speed up builds, see https://ccache.dev/
 
+# Matches both ccache and sccache
 if (CMAKE_CXX_COMPILER_LAUNCHER MATCHES "ccache" OR CMAKE_C_COMPILER_LAUNCHER MATCHES "ccache")
     # custom compiler launcher already defined, most likely because cmake was invoked with like "-DCMAKE_CXX_COMPILER_LAUNCHER=ccache" or
     # via environment variable --> respect setting and trust that the launcher was specified correctly
@@ -8,45 +9,65 @@ if (CMAKE_CXX_COMPILER_LAUNCHER MATCHES "ccache" OR CMAKE_C_COMPILER_LAUNCHER MA
     return()
 endif()
 
-option(ENABLE_CCACHE "Speedup re-compilations using ccache (external tool)" ON)
-
-if (NOT ENABLE_CCACHE)
-    message(STATUS "Using ccache: no (disabled via configuration)")
-    return()
+set(ENABLE_CCACHE "default" CACHE STRING "Deprecated, use COMPILER_CACHE=(auto|ccache|sccache|disabled)")
+if (NOT ENABLE_CCACHE STREQUAL "default")
+    message(WARNING "The -DENABLE_CCACHE is deprecated in favor of -DCOMPILER_CACHE")
 endif()
 
-find_program (CCACHE_EXECUTABLE ccache)
+set(COMPILER_CACHE "auto" CACHE STRING "Speedup re-compilations using the caching tools; valid options are 'auto' (ccache, then sccache), 'ccache', 'sccache', or 'disabled'")
+
+# It has pretty complex logic, because the ENABLE_CCACHE is deprecated, but still should
+# control the COMPILER_CACHE
+# After it will be completely removed, the following block will be much simpler
+if (COMPILER_CACHE STREQUAL "ccache" OR (ENABLE_CCACHE AND NOT ENABLE_CCACHE STREQUAL "default"))
+    find_program (CCACHE_EXECUTABLE ccache)
+elseif(COMPILER_CACHE STREQUAL "disabled" OR NOT ENABLE_CCACHE STREQUAL "default")
+    message(STATUS "Using *ccache: no (disabled via configuration)")
+    return()
+elseif(COMPILER_CACHE STREQUAL "auto")
+    find_program (CCACHE_EXECUTABLE ccache sccache)
+elseif(COMPILER_CACHE STREQUAL "sccache")
+    find_program (CCACHE_EXECUTABLE sccache)
+else()
+    message(${RECONFIGURE_MESSAGE_LEVEL} "The COMPILER_CACHE must be one of (auto|ccache|sccache|disabled), given '${COMPILER_CACHE}'")
+endif()
 
 if (NOT CCACHE_EXECUTABLE)
-    message(${RECONFIGURE_MESSAGE_LEVEL} "Using ccache: no (Could not find find ccache. To significantly reduce compile times for the 2nd, 3rd, etc. build, it is highly recommended to install ccache. To suppress this message, run cmake with -DENABLE_CCACHE=0)")
+    message(${RECONFIGURE_MESSAGE_LEVEL} "Using *ccache: no (Could not find find ccache or sccache. To significantly reduce compile times for the 2nd, 3rd, etc. build, it is highly recommended to install one of them. To suppress this message, run cmake with -DCOMPILER_CACHE=disabled)")
     return()
 endif()
 
-execute_process(COMMAND ${CCACHE_EXECUTABLE} "-V" OUTPUT_VARIABLE CCACHE_VERSION)
-string(REGEX REPLACE "ccache version ([0-9\\.]+).*" "\\1" CCACHE_VERSION ${CCACHE_VERSION})
-
-set (CCACHE_MINIMUM_VERSION 3.3)
-
-if (CCACHE_VERSION VERSION_LESS_EQUAL ${CCACHE_MINIMUM_VERSION})
-    message(${RECONFIGURE_MESSAGE_LEVEL} "Using ccache: no (found ${CCACHE_EXECUTABLE} (version ${CCACHE_VERSION}), the minimum required version is ${CCACHE_MINIMUM_VERSION}")
-    return()
-endif()
-
-message(STATUS "Using ccache: ${CCACHE_EXECUTABLE} (version ${CCACHE_VERSION})")
-set(LAUNCHER ${CCACHE_EXECUTABLE})
-
-# Work around a well-intended but unfortunate behavior of ccache 4.0 & 4.1 with
-# environment variable SOURCE_DATE_EPOCH. This variable provides an alternative
-# to source-code embedded timestamps (__DATE__/__TIME__) and therefore helps with
-# reproducible builds (*). SOURCE_DATE_EPOCH is set automatically by the
-# distribution, e.g. Debian. Ccache 4.0 & 4.1 incorporate SOURCE_DATE_EPOCH into
-# the hash calculation regardless they contain timestamps or not. This invalidates
-# the cache whenever SOURCE_DATE_EPOCH changes. As a fix, ignore SOURCE_DATE_EPOCH.
-#
-# (*) https://reproducible-builds.org/specs/source-date-epoch/
-if (CCACHE_VERSION VERSION_GREATER_EQUAL "4.0" AND CCACHE_VERSION VERSION_LESS "4.2")
-    message(STATUS "Ignore SOURCE_DATE_EPOCH for ccache 4.0 / 4.1")
-    set(LAUNCHER env -u SOURCE_DATE_EPOCH ${CCACHE_EXECUTABLE})
+if (CCACHE_EXECUTABLE MATCHES "/ccache$")
+    execute_process(COMMAND ${CCACHE_EXECUTABLE} "-V" OUTPUT_VARIABLE CCACHE_VERSION)
+    string(REGEX REPLACE "ccache version ([0-9\\.]+).*" "\\1" CCACHE_VERSION ${CCACHE_VERSION})
+
+    set (CCACHE_MINIMUM_VERSION 3.3)
+
+    if (CCACHE_VERSION VERSION_LESS_EQUAL ${CCACHE_MINIMUM_VERSION})
+        message(${RECONFIGURE_MESSAGE_LEVEL} "Using ccache: no (found ${CCACHE_EXECUTABLE} (version ${CCACHE_VERSION}), the minimum required version is ${CCACHE_MINIMUM_VERSION}")
+        return()
+    endif()
+
+    message(STATUS "Using ccache: ${CCACHE_EXECUTABLE} (version ${CCACHE_VERSION})")
+    set(LAUNCHER ${CCACHE_EXECUTABLE})
+
+    # Work around a well-intended but unfortunate behavior of ccache 4.0 & 4.1 with
+    # environment variable SOURCE_DATE_EPOCH. This variable provides an alternative
+    # to source-code embedded timestamps (__DATE__/__TIME__) and therefore helps with
+    # reproducible builds (*). SOURCE_DATE_EPOCH is set automatically by the
+    # distribution, e.g. Debian. Ccache 4.0 & 4.1 incorporate SOURCE_DATE_EPOCH into
+    # the hash calculation regardless they contain timestamps or not. This invalidates
+    # the cache whenever SOURCE_DATE_EPOCH changes. As a fix, ignore SOURCE_DATE_EPOCH.
+    #
+    # (*) https://reproducible-builds.org/specs/source-date-epoch/
+    if (CCACHE_VERSION VERSION_GREATER_EQUAL "4.0" AND CCACHE_VERSION VERSION_LESS "4.2")
+        message(STATUS "Ignore SOURCE_DATE_EPOCH for ccache 4.0 / 4.1")
+        set(LAUNCHER env -u SOURCE_DATE_EPOCH ${CCACHE_EXECUTABLE})
+    endif()
+elseif(CCACHE_EXECUTABLE MATCHES "/sccache$")
+    message(STATUS "Using sccache: ${CCACHE_EXECUTABLE}")
+    set(LAUNCHER ${CCACHE_EXECUTABLE})
 endif()
 
 set (CMAKE_CXX_COMPILER_LAUNCHER ${LAUNCHER} ${CMAKE_CXX_COMPILER_LAUNCHER})
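Note that the guard at the top of the module keeps its old behavior: a launcher supplied by the caller is respected, and it now matches sccache as well as ccache. A hedged equivalent of -DCOMPILER_CACHE=sccache that bypasses the selection logic entirely (assuming sccache is on PATH):

    cmake -S . -B build \
        -DCMAKE_C_COMPILER_LAUNCHER=sccache \
        -DCMAKE_CXX_COMPILER_LAUNCHER=sccache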
@@ -69,13 +69,14 @@ RUN add-apt-repository ppa:ubuntu-toolchain-r/test --yes \
     libc6 \
     libc6-dev \
     libc6-dev-arm64-cross \
+    python3-boto3 \
     yasm \
     zstd \
     && apt-get clean \
     && rm -rf /var/lib/apt/lists
 
 # Download toolchain and SDK for Darwin
-RUN wget -nv https://github.com/phracker/MacOSX-SDKs/releases/download/11.3/MacOSX11.0.sdk.tar.xz
+RUN curl -sL -O https://github.com/phracker/MacOSX-SDKs/releases/download/11.3/MacOSX11.0.sdk.tar.xz
 
 # Architecture of the image when BuildKit/buildx is used
 ARG TARGETARCH
@@ -97,7 +98,7 @@ ENV PATH="$PATH:/usr/local/go/bin"
 ENV GOPATH=/workdir/go
 ENV GOCACHE=/workdir/
 
-ARG CLANG_TIDY_SHA1=03644275e794b0587849bfc2ec6123d5ae0bdb1c
+ARG CLANG_TIDY_SHA1=c191254ea00d47ade11d7170ef82fe038c213774
 RUN curl -Lo /usr/bin/clang-tidy-cache \
     "https://raw.githubusercontent.com/matus-chochlik/ctcache/$CLANG_TIDY_SHA1/clang-tidy-cache" \
     && chmod +x /usr/bin/clang-tidy-cache
@@ -6,6 +6,7 @@ exec &> >(ts)
 ccache_status () {
     ccache --show-config ||:
     ccache --show-stats ||:
+    SCCACHE_NO_DAEMON=1 sccache --show-stats ||:
 }
 
 [ -O /build ] || git config --global --add safe.directory /build
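ccache keeps its statistics locally, while sccache reports them through its server process; the SCCACHE_NO_DAEMON=1 form added above appears intended to query the stats without leaving a background sccache server running in the CI container. The same pair of commands works for a quick local check:

    ccache --show-stats ||:
    SCCACHE_NO_DAEMON=1 sccache --show-stats ||:   # one-shot query, no daemon kept around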
@@ -5,13 +5,19 @@ import os
 import argparse
 import logging
 import sys
-from typing import List
+from pathlib import Path
+from typing import List, Optional
 
-SCRIPT_PATH = os.path.realpath(__file__)
+SCRIPT_PATH = Path(__file__).absolute()
 IMAGE_TYPE = "binary"
+IMAGE_NAME = f"clickhouse/{IMAGE_TYPE}-builder"
 
 
-def check_image_exists_locally(image_name):
+class BuildException(Exception):
+    pass
+
+
+def check_image_exists_locally(image_name: str) -> bool:
     try:
         output = subprocess.check_output(
             f"docker images -q {image_name} 2> /dev/null", shell=True
@@ -21,17 +27,17 @@ def check_image_exists_locally(image_name):
         return False
 
 
-def pull_image(image_name):
+def pull_image(image_name: str) -> bool:
     try:
         subprocess.check_call(f"docker pull {image_name}", shell=True)
         return True
     except subprocess.CalledProcessError:
-        logging.info(f"Cannot pull image {image_name}".format())
+        logging.info("Cannot pull image %s", image_name)
         return False
 
 
-def build_image(image_name, filepath):
-    context = os.path.dirname(filepath)
+def build_image(image_name: str, filepath: Path) -> None:
+    context = filepath.parent
     build_cmd = f"docker build --network=host -t {image_name} -f {filepath} {context}"
     logging.info("Will build image with cmd: '%s'", build_cmd)
     subprocess.check_call(
@@ -40,7 +46,7 @@ def build_image(image_name, filepath):
     )
 
 
-def pre_build(repo_path: str, env_variables: List[str]):
+def pre_build(repo_path: Path, env_variables: List[str]):
     if "WITH_PERFORMANCE=1" in env_variables:
         current_branch = subprocess.check_output(
             "git branch --show-current", shell=True, encoding="utf-8"
@@ -56,7 +62,9 @@ def pre_build(repo_path: str, env_variables: List[str]):
         # conclusion is: in the current state the easiest way to go is to force
         # unshallow repository for performance artifacts.
         # To change it we need to rework our performance tests docker image
-        raise Exception("shallow repository is not suitable for performance builds")
+        raise BuildException(
+            "shallow repository is not suitable for performance builds"
+        )
     if current_branch != "master":
         cmd = (
             f"git -C {repo_path} fetch --no-recurse-submodules "
@@ -67,14 +75,14 @@ def pre_build(repo_path: str, env_variables: List[str]):
 
 
 def run_docker_image_with_env(
-    image_name,
-    as_root,
-    output,
-    env_variables,
-    ch_root,
-    ccache_dir,
-    docker_image_version,
+    image_name: str,
+    as_root: bool,
+    output_dir: Path,
+    env_variables: List[str],
+    ch_root: Path,
+    ccache_dir: Optional[Path],
 ):
+    output_dir.mkdir(parents=True, exist_ok=True)
     env_part = " -e ".join(env_variables)
     if env_part:
         env_part = " -e " + env_part
@@ -89,10 +97,14 @@ def run_docker_image_with_env(
     else:
         user = f"{os.geteuid()}:{os.getegid()}"
 
+    ccache_mount = f"--volume={ccache_dir}:/ccache"
+    if ccache_dir is None:
+        ccache_mount = ""
+
     cmd = (
-        f"docker run --network=host --user={user} --rm --volume={output}:/output "
-        f"--volume={ch_root}:/build --volume={ccache_dir}:/ccache {env_part} "
-        f"{interactive} {image_name}:{docker_image_version}"
+        f"docker run --network=host --user={user} --rm {ccache_mount}"
+        f"--volume={output_dir}:/output --volume={ch_root}:/build {env_part} "
+        f"{interactive} {image_name}"
     )
 
     logging.info("Will build ClickHouse pkg with cmd: '%s'", cmd)
@@ -100,24 +112,25 @@ def run_docker_image_with_env(
     subprocess.check_call(cmd, shell=True)
 
 
-def is_release_build(build_type, package_type, sanitizer):
+def is_release_build(build_type: str, package_type: str, sanitizer: str) -> bool:
     return build_type == "" and package_type == "deb" and sanitizer == ""
 
 
 def parse_env_variables(
-    build_type,
-    compiler,
-    sanitizer,
-    package_type,
-    cache,
-    distcc_hosts,
-    clang_tidy,
-    version,
-    author,
-    official,
-    additional_pkgs,
-    with_coverage,
-    with_binaries,
+    build_type: str,
+    compiler: str,
+    sanitizer: str,
+    package_type: str,
+    cache: str,
+    s3_bucket: str,
+    s3_directory: str,
+    s3_rw_access: bool,
+    clang_tidy: bool,
+    version: str,
+    official: bool,
+    additional_pkgs: bool,
+    with_coverage: bool,
+    with_binaries: str,
 ):
     DARWIN_SUFFIX = "-darwin"
     DARWIN_ARM_SUFFIX = "-darwin-aarch64"
@@ -243,32 +256,43 @@ def parse_env_variables(
     else:
         result.append("BUILD_TYPE=None")
 
-    if cache == "distcc":
-        result.append(f"CCACHE_PREFIX={cache}")
+    if not cache:
+        cmake_flags.append("-DCOMPILER_CACHE=disabled")
 
-    if cache:
+    if cache == "ccache":
+        cmake_flags.append("-DCOMPILER_CACHE=ccache")
         result.append("CCACHE_DIR=/ccache")
         result.append("CCACHE_COMPRESSLEVEL=5")
         result.append("CCACHE_BASEDIR=/build")
         result.append("CCACHE_NOHASHDIR=true")
         result.append("CCACHE_COMPILERCHECK=content")
-        cache_maxsize = "15G"
-        if clang_tidy:
-            # 15G is not enough for tidy build
-            cache_maxsize = "25G"
+        result.append("CCACHE_MAXSIZE=15G")
 
-        # `CTCACHE_DIR` has the same purpose as the `CCACHE_DIR` above.
-        # It's there to have the clang-tidy cache embedded into our standard `CCACHE_DIR`
-        result.append("CTCACHE_DIR=/ccache/clang-tidy-cache")
-        result.append(f"CCACHE_MAXSIZE={cache_maxsize}")
+    if cache == "sccache":
+        cmake_flags.append("-DCOMPILER_CACHE=sccache")
+        # see https://github.com/mozilla/sccache/blob/main/docs/S3.md
+        result.append(f"SCCACHE_BUCKET={s3_bucket}")
+        sccache_dir = "sccache"
+        if s3_directory:
+            sccache_dir = f"{s3_directory}/{sccache_dir}"
+        result.append(f"SCCACHE_S3_KEY_PREFIX={sccache_dir}")
+        if not s3_rw_access:
+            result.append("SCCACHE_S3_NO_CREDENTIALS=true")
 
-    if distcc_hosts:
-        hosts_with_params = [f"{host}/24,lzo" for host in distcc_hosts] + [
-            "localhost/`nproc`"
-        ]
-        result.append('DISTCC_HOSTS="' + " ".join(hosts_with_params) + '"')
-    elif cache == "distcc":
-        result.append('DISTCC_HOSTS="localhost/`nproc`"')
+    if clang_tidy:
+        # `CTCACHE_DIR` has the same purpose as the `CCACHE_DIR` above.
+        # It's there to have the clang-tidy cache embedded into our standard `CCACHE_DIR`
+        if cache == "ccache":
+            result.append("CTCACHE_DIR=/ccache/clang-tidy-cache")
+        if s3_bucket:
+            # see https://github.com/matus-chochlik/ctcache#environment-variables
+            ctcache_dir = "clang-tidy-cache"
+            if s3_directory:
+                ctcache_dir = f"{s3_directory}/{ctcache_dir}"
+            result.append(f"CTCACHE_S3_BUCKET={s3_bucket}")
+            result.append(f"CTCACHE_S3_FOLDER={ctcache_dir}")
+            if not s3_rw_access:
+                result.append("CTCACHE_S3_NO_CREDENTIALS=true")
 
     if additional_pkgs:
         # NOTE: This are the env for packages/build script
@@ -300,9 +324,6 @@ def parse_env_variables(
     if version:
         result.append(f"VERSION_STRING='{version}'")
 
-    if author:
-        result.append(f"AUTHOR='{author}'")
-
     if official:
         cmake_flags.append("-DCLICKHOUSE_OFFICIAL_BUILD=1")
 
@@ -312,14 +333,14 @@ def parse_env_variables(
     return result
 
 
-def dir_name(name: str) -> str:
-    if not os.path.isabs(name):
-        name = os.path.abspath(os.path.join(os.getcwd(), name))
-    return name
+def dir_name(name: str) -> Path:
+    path = Path(name)
+    if not path.is_absolute():
+        path = Path.cwd() / name
+    return path
 
 
-if __name__ == "__main__":
-    logging.basicConfig(level=logging.INFO, format="%(asctime)s %(message)s")
+def parse_args() -> argparse.Namespace:
     parser = argparse.ArgumentParser(
         formatter_class=argparse.ArgumentDefaultsHelpFormatter,
         description="ClickHouse building script using prebuilt Docker image",
@@ -331,7 +352,7 @@ if __name__ == "__main__":
     )
     parser.add_argument(
         "--clickhouse-repo-path",
-        default=os.path.join(os.path.dirname(SCRIPT_PATH), os.pardir, os.pardir),
+        default=SCRIPT_PATH.parents[2],
        type=dir_name,
         help="ClickHouse git repository",
     )
@@ -361,17 +382,34 @@ if __name__ == "__main__":
     )
 
     parser.add_argument("--clang-tidy", action="store_true")
-    parser.add_argument("--cache", choices=("ccache", "distcc", ""), default="")
     parser.add_argument(
-        "--ccache_dir",
-        default=os.getenv("HOME", "") + "/.ccache",
+        "--cache",
+        choices=("ccache", "sccache", ""),
+        default="",
+        help="ccache or sccache for objects caching; sccache uses only S3 buckets",
+    )
+    parser.add_argument(
+        "--ccache-dir",
+        default=Path.home() / ".ccache",
         type=dir_name,
         help="a directory with ccache",
     )
-    parser.add_argument("--distcc-hosts", nargs="+")
+    parser.add_argument(
+        "--s3-bucket",
+        help="an S3 bucket used for sscache and clang-tidy-cache",
+    )
+    parser.add_argument(
+        "--s3-directory",
+        default="ccache",
+        help="an S3 directory prefix used for sscache and clang-tidy-cache",
+    )
+    parser.add_argument(
+        "--s3-rw-access",
+        action="store_true",
+        help="if set, the build fails on errors writing cache to S3",
+    )
     parser.add_argument("--force-build-image", action="store_true")
     parser.add_argument("--version")
-    parser.add_argument("--author", default="clickhouse", help="a package author")
    parser.add_argument("--official", action="store_true")
     parser.add_argument("--additional-pkgs", action="store_true")
     parser.add_argument("--with-coverage", action="store_true")
@@ -387,34 +425,54 @@ if __name__ == "__main__":
 
     args = parser.parse_args()
 
-    image_name = f"clickhouse/{IMAGE_TYPE}-builder"
+    if args.additional_pkgs and args.package_type != "deb":
+        raise argparse.ArgumentTypeError(
+            "Can build additional packages only in deb build"
+        )
+
+    if args.cache != "ccache":
+        args.ccache_dir = None
+
+    if args.with_binaries != "":
+        if args.package_type != "deb":
+            raise argparse.ArgumentTypeError(
+                "Can add additional binaries only in deb build"
+            )
+        logging.info("Should place %s to output", args.with_binaries)
+
+    if args.cache == "sccache":
+        if not args.s3_bucket:
+            raise argparse.ArgumentTypeError("sccache must have --s3-bucket set")
+
+    return args
+
+
+def main():
+    logging.basicConfig(level=logging.INFO, format="%(asctime)s %(message)s")
+    args = parse_args()
 
     ch_root = args.clickhouse_repo_path
 
-    if args.additional_pkgs and args.package_type != "deb":
-        raise Exception("Can build additional packages only in deb build")
+    dockerfile = ch_root / "docker/packager" / IMAGE_TYPE / "Dockerfile"
+    image_with_version = IMAGE_NAME + ":" + args.docker_image_version
+    if args.force_build_image:
+        build_image(image_with_version, dockerfile)
+    elif not (
+        check_image_exists_locally(image_with_version) or pull_image(image_with_version)
+    ):
+        build_image(image_with_version, dockerfile)
 
-    if args.with_binaries != "" and args.package_type != "deb":
-        raise Exception("Can add additional binaries only in deb build")
-
-    if args.with_binaries != "" and args.package_type == "deb":
-        logging.info("Should place %s to output", args.with_binaries)
-
-    dockerfile = os.path.join(ch_root, "docker/packager", IMAGE_TYPE, "Dockerfile")
-    image_with_version = image_name + ":" + args.docker_image_version
-    if not check_image_exists_locally(image_name) or args.force_build_image:
-        if not pull_image(image_with_version) or args.force_build_image:
-            build_image(image_with_version, dockerfile)
     env_prepared = parse_env_variables(
         args.build_type,
         args.compiler,
         args.sanitizer,
         args.package_type,
         args.cache,
-        args.distcc_hosts,
+        args.s3_bucket,
+        args.s3_directory,
+        args.s3_rw_access,
         args.clang_tidy,
         args.version,
-        args.author,
         args.official,
         args.additional_pkgs,
         args.with_coverage,
@@ -423,12 +481,15 @@ if __name__ == "__main__":
 
     pre_build(args.clickhouse_repo_path, env_prepared)
     run_docker_image_with_env(
-        image_name,
+        image_with_version,
         args.as_root,
         args.output_dir,
         env_prepared,
         ch_root,
         args.ccache_dir,
-        args.docker_image_version,
     )
     logging.info("Output placed into %s", args.output_dir)
+
+
+if __name__ == "__main__":
+    main()
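With the flags above, the CI build job (tests/ci/build_check.py below) can point the packager at S3-backed sccache instead of mounting a ccache volume. A hedged invocation sketch, run from the repository root with a placeholder bucket (the script path and bucket name are assumptions, not values from this PR):

    docker/packager/packager \
        --package-type deb \
        --output-dir /tmp/ch-output \
        --cache sccache \
        --s3-bucket my-ci-builds-bucket \
        --s3-directory ccache \
        --s3-rw-access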
@@ -20,12 +20,6 @@ RUN apt-get update \
     zstd \
     --yes --no-install-recommends
 
-# Install CMake 3.20+ for Rust compilation
-RUN apt purge cmake --yes
-RUN wget -O - https://apt.kitware.com/keys/kitware-archive-latest.asc 2>/dev/null | gpg --dearmor - | tee /etc/apt/trusted.gpg.d/kitware.gpg >/dev/null
-RUN apt-add-repository 'deb https://apt.kitware.com/ubuntu/ focal main'
-RUN apt update && apt install cmake --yes
-
 RUN pip3 install numpy scipy pandas Jinja2
 
 ARG odbc_driver_url="https://github.com/ClickHouse/clickhouse-odbc/releases/download/v1.1.4.20200302/clickhouse-odbc-1.1.4-Linux.tar.gz"
@@ -16,7 +16,8 @@ export LLVM_VERSION=${LLVM_VERSION:-13}
 # it being undefined. Also read it as array so that we can pass an empty list
 # of additional variable to cmake properly, and it doesn't generate an extra
 # empty parameter.
-read -ra FASTTEST_CMAKE_FLAGS <<< "${FASTTEST_CMAKE_FLAGS:-}"
+# Read it as CMAKE_FLAGS to not lose exported FASTTEST_CMAKE_FLAGS on subsequential launch
+read -ra CMAKE_FLAGS <<< "${FASTTEST_CMAKE_FLAGS:-}"
 
 # Run only matching tests.
 FASTTEST_FOCUS=${FASTTEST_FOCUS:-""}
@@ -37,6 +38,13 @@ export FASTTEST_DATA
 export FASTTEST_OUT
 export PATH
 
+function ccache_status
+{
+    ccache --show-config ||:
+    ccache --show-stats ||:
+    SCCACHE_NO_DAEMON=1 sccache --show-stats ||:
+}
+
 function start_server
 {
     set -m # Spawn server in its own process groups
@@ -171,14 +179,14 @@ function run_cmake
     export CCACHE_COMPILERCHECK=content
     export CCACHE_MAXSIZE=15G
 
-    ccache --show-stats ||:
+    ccache_status
     ccache --zero-stats ||:
 
     mkdir "$FASTTEST_BUILD" ||:
 
     (
         cd "$FASTTEST_BUILD"
-        cmake "$FASTTEST_SOURCE" -DCMAKE_CXX_COMPILER="clang++-${LLVM_VERSION}" -DCMAKE_C_COMPILER="clang-${LLVM_VERSION}" "${CMAKE_LIBS_CONFIG[@]}" "${FASTTEST_CMAKE_FLAGS[@]}" 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee "$FASTTEST_OUTPUT/cmake_log.txt"
+        cmake "$FASTTEST_SOURCE" -DCMAKE_CXX_COMPILER="clang++-${LLVM_VERSION}" -DCMAKE_C_COMPILER="clang-${LLVM_VERSION}" "${CMAKE_LIBS_CONFIG[@]}" "${CMAKE_FLAGS[@]}" 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee "$FASTTEST_OUTPUT/cmake_log.txt"
     )
 }
 
@@ -193,7 +201,7 @@ function build
         strip programs/clickhouse -o "$FASTTEST_OUTPUT/clickhouse-stripped"
         zstd --threads=0 "$FASTTEST_OUTPUT/clickhouse-stripped"
     fi
-    ccache --show-stats ||:
+    ccache_status
     ccache --evict-older-than 1d ||:
 )
 }
@@ -92,4 +92,17 @@ RUN mkdir /tmp/ccache \
     && cd / \
     && rm -rf /tmp/ccache
 
+ARG TARGETARCH
+ARG SCCACHE_VERSION=v0.4.1
+RUN arch=${TARGETARCH:-amd64} \
+    && case $arch in \
+        amd64) rarch=x86_64 ;; \
+        arm64) rarch=aarch64 ;; \
+    esac \
+    && curl -Ls "https://github.com/mozilla/sccache/releases/download/$SCCACHE_VERSION/sccache-$SCCACHE_VERSION-$rarch-unknown-linux-musl.tar.gz" | \
+        tar xz -C /tmp \
+    && mv "/tmp/sccache-$SCCACHE_VERSION-$rarch-unknown-linux-musl/sccache" /usr/bin \
+    && rm "/tmp/sccache-$SCCACHE_VERSION-$rarch-unknown-linux-musl" -r
+
 COPY process_functional_tests_result.py /
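Inside the build container, the packager's --s3-* options end up as the environment variables that sccache and clang-tidy-cache understand. Roughly, a read-only PR build with clang-tidy enabled would see something like this (bucket and prefixes are illustrative; the real values come from the CI configuration):

    export SCCACHE_BUCKET=some-builds-bucket        # from --s3-bucket
    export SCCACHE_S3_KEY_PREFIX=ccache/sccache     # from --s3-directory
    export SCCACHE_S3_NO_CREDENTIALS=true           # unless --s3-rw-access is given

    export CTCACHE_S3_BUCKET=some-builds-bucket     # clang-tidy-cache, only with --clang-tidy
    export CTCACHE_S3_FOLDER=ccache/clang-tidy-cache
    export CTCACHE_S3_NO_CREDENTIALS=true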
@@ -6,15 +6,12 @@ import json
 import os
 import sys
 import time
-from shutil import rmtree
 from typing import List, Tuple
 
-from ccache_utils import get_ccache_if_not_exists, upload_ccache
 from ci_config import CI_CONFIG, BuildConfig
 from commit_status_helper import get_commit_filtered_statuses, get_commit
 from docker_pull_helper import get_image_with_version
 from env_helper import (
-    CACHES_PATH,
     GITHUB_JOB,
     IMAGES_PATH,
     REPO_COPY,
@@ -54,7 +51,6 @@ def get_packager_cmd(
     output_path: str,
     build_version: str,
     image_version: str,
-    ccache_path: str,
     official: bool,
 ) -> str:
     package_type = build_config["package_type"]
@@ -72,8 +68,9 @@ def get_packager_cmd(
     if build_config["tidy"] == "enable":
         cmd += " --clang-tidy"
 
-    cmd += " --cache=ccache"
-    cmd += f" --ccache_dir={ccache_path}"
+    cmd += " --cache=sccache"
+    cmd += " --s3-rw-access"
+    cmd += f" --s3-bucket={S3_BUILDS_BUCKET}"
 
     if "additional_pkgs" in build_config and build_config["additional_pkgs"]:
         cmd += " --additional-pkgs"
@@ -314,29 +311,12 @@ def main():
     if not os.path.exists(build_output_path):
         os.makedirs(build_output_path)
 
-    ccache_path = os.path.join(CACHES_PATH, build_name + "_ccache")
-
-    logging.info("Will try to fetch cache for our build")
-    try:
-        get_ccache_if_not_exists(
-            ccache_path, s3_helper, pr_info.number, TEMP_PATH, pr_info.release_pr
-        )
-    except Exception as e:
-        # In case there are issues with ccache, remove the path and do not fail a build
-        logging.info("Failed to get ccache, building without it. Error: %s", e)
-        rmtree(ccache_path, ignore_errors=True)
-
-    if not os.path.exists(ccache_path):
-        logging.info("cache was not fetched, will create empty dir")
-        os.makedirs(ccache_path)
-
     packager_cmd = get_packager_cmd(
         build_config,
         os.path.join(REPO_COPY, "docker/packager"),
         build_output_path,
         version.string,
         image_version,
-        ccache_path,
         official_flag,
     )
 
@@ -352,13 +332,8 @@ def main():
     subprocess.check_call(
         f"sudo chown -R ubuntu:ubuntu {build_output_path}", shell=True
     )
-    subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {ccache_path}", shell=True)
     logging.info("Build finished with %s, log path %s", success, log_path)
 
-    # Upload the ccache first to have the least build time in case of problems
-    logging.info("Will upload cache")
-    upload_ccache(ccache_path, s3_helper, pr_info.number, TEMP_PATH)
-
     # FIXME performance
     performance_urls = []
     performance_path = os.path.join(build_output_path, "performance.tar.zst")
@@ -11,7 +11,6 @@ from typing import List, Tuple
 
 from github import Github
 
-from ccache_utils import get_ccache_if_not_exists, upload_ccache
 from clickhouse_helper import (
     ClickHouseHelper,
     mark_flaky_tests,
@@ -22,7 +21,7 @@ from commit_status_helper import (
     update_mergeable_check,
 )
 from docker_pull_helper import get_image_with_version
-from env_helper import CACHES_PATH, TEMP_PATH
+from env_helper import S3_BUILDS_BUCKET, TEMP_PATH
 from get_robot_token import get_best_robot_token
 from pr_info import FORCE_TESTS_LABEL, PRInfo
 from report import TestResults, read_test_results
@@ -38,24 +37,22 @@ NAME = "Fast test"
 csv.field_size_limit(sys.maxsize)
 
 
-def get_fasttest_cmd(
-    workspace, output_path, ccache_path, repo_path, pr_number, commit_sha, image
-):
+def get_fasttest_cmd(workspace, output_path, repo_path, pr_number, commit_sha, image):
     return (
         f"docker run --cap-add=SYS_PTRACE "
+        "--network=host "  # required to get access to IAM credentials
         f"-e FASTTEST_WORKSPACE=/fasttest-workspace -e FASTTEST_OUTPUT=/test_output "
         f"-e FASTTEST_SOURCE=/ClickHouse --cap-add=SYS_PTRACE "
+        f"-e FASTTEST_CMAKE_FLAGS='-DCOMPILER_CACHE=sccache' "
         f"-e PULL_REQUEST_NUMBER={pr_number} -e COMMIT_SHA={commit_sha} "
         f"-e COPY_CLICKHOUSE_BINARY_TO_OUTPUT=1 "
+        f"-e SCCACHE_BUCKET={S3_BUILDS_BUCKET} -e SCCACHE_S3_KEY_PREFIX=ccache/sccache "
         f"--volume={workspace}:/fasttest-workspace --volume={repo_path}:/ClickHouse "
-        f"--volume={output_path}:/test_output "
-        f"--volume={ccache_path}:/fasttest-workspace/ccache {image}"
+        f"--volume={output_path}:/test_output {image}"
     )
 
 
-def process_results(
-    result_folder: str,
-) -> Tuple[str, str, TestResults, List[str]]:
+def process_results(result_folder: str) -> Tuple[str, str, TestResults, List[str]]:
     test_results = []  # type: TestResults
     additional_files = []
     # Just upload all files from result_folder.
@@ -129,21 +126,6 @@ def main():
     if not os.path.exists(output_path):
         os.makedirs(output_path)
 
-    if not os.path.exists(CACHES_PATH):
-        os.makedirs(CACHES_PATH)
-    subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {CACHES_PATH}", shell=True)
-    cache_path = os.path.join(CACHES_PATH, "fasttest")
-
-    logging.info("Will try to fetch cache for our build")
-    ccache_for_pr = get_ccache_if_not_exists(
-        cache_path, s3_helper, pr_info.number, temp_path, pr_info.release_pr
-    )
-    upload_master_ccache = ccache_for_pr in (-1, 0)
-
-    if not os.path.exists(cache_path):
-        logging.info("cache was not fetched, will create empty dir")
-        os.makedirs(cache_path)
-
     repo_path = os.path.join(temp_path, "fasttest-repo")
     if not os.path.exists(repo_path):
         os.makedirs(repo_path)
@@ -151,7 +133,6 @@ def main():
     run_cmd = get_fasttest_cmd(
         workspace,
         output_path,
-        cache_path,
         repo_path,
         pr_info.number,
         pr_info.sha,
@@ -172,7 +153,6 @@ def main():
         logging.info("Run failed")
 
     subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {temp_path}", shell=True)
-    subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {cache_path}", shell=True)
 
     test_output_files = os.listdir(output_path)
     additional_logs = []
@@ -202,12 +182,6 @@ def main():
     else:
         state, description, test_results, additional_logs = process_results(output_path)
 
-    logging.info("Will upload cache")
-    upload_ccache(cache_path, s3_helper, pr_info.number, temp_path)
-    if upload_master_ccache:
-        logging.info("Will upload a fallback cache for master")
-        upload_ccache(cache_path, s3_helper, 0, temp_path)
-
     ch_helper = ClickHouseHelper()
     mark_flaky_tests(ch_helper, NAME, test_results)
 
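The fast test follows the same pattern: no local cache volume is mounted any more; the container is told to configure with -DCOMPILER_CACHE=sccache and is handed the bucket through the environment, so consecutive runs share the S3 cache without an explicit fetch/upload step. A stripped-down sketch of the same idea outside CI (image name, bucket, and volume layout are assumptions, not values from this PR):

    docker run --network=host --cap-add=SYS_PTRACE \
        -e FASTTEST_CMAKE_FLAGS='-DCOMPILER_CACHE=sccache' \
        -e SCCACHE_BUCKET=my-ci-builds-bucket \
        -e SCCACHE_S3_KEY_PREFIX=ccache/sccache \
        -e FASTTEST_WORKSPACE=/fasttest-workspace -e FASTTEST_OUTPUT=/test_output \
        -e FASTTEST_SOURCE=/ClickHouse \
        --volume="$PWD":/ClickHouse --volume=/tmp/fasttest-out:/test_output \
        clickhouse/fasttest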
@@ -1,2 +1,3 @@
 test_shard_localhost
-test_shard_localhost 1 1 1 localhost ::1 9000 1 default
+test_cluster_one_shard_two_replicas 1 1 1 127.0.0.1 127.0.0.1 9000 1 default
+test_cluster_one_shard_two_replicas 1 1 2 127.0.0.2 127.0.0.2 9000 0 default
@@ -6,4 +6,5 @@ CUR_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
 
 $CLICKHOUSE_CLIENT -q "show clusters like 'test_shard%' limit 1"
 # cluster,shard_num,shard_weight,replica_num,host_name,host_address,port,is_local,user,default_database[,errors_count,slowdowns_count,estimated_recovery_time]
-$CLICKHOUSE_CLIENT -q "show cluster 'test_shard_localhost'" | cut -f-10
+# use a cluster with static IPv4
+$CLICKHOUSE_CLIENT -q "show cluster 'test_cluster_one_shard_two_replicas'" | cut -f-10