diff --git a/.github/ISSUE_TEMPLATE/10_question.md b/.github/ISSUE_TEMPLATE/10_question.md
index a112b9599d5..5b3d00a3180 100644
--- a/.github/ISSUE_TEMPLATE/10_question.md
+++ b/.github/ISSUE_TEMPLATE/10_question.md
@@ -7,6 +7,6 @@ assignees: ''
---
-> Make sure to check documentation https://clickhouse.yandex/docs/en/ first. If the question is concise and probably has a short answer, asking it in Telegram chat https://telegram.me/clickhouse_en is probably the fastest way to find the answer. For more complicated questions, consider asking them on StackOverflow with "clickhouse" tag https://stackoverflow.com/questions/tagged/clickhouse
+> Make sure to check documentation https://clickhouse.com/docs/en/ first. If the question is concise and probably has a short answer, asking it in Telegram chat https://telegram.me/clickhouse_en is probably the fastest way to find the answer. For more complicated questions, consider asking them on StackOverflow with "clickhouse" tag https://stackoverflow.com/questions/tagged/clickhouse
> If you still prefer GitHub issues, remove all this text and ask your question here.
diff --git a/.github/ISSUE_TEMPLATE/50_build-issue.md b/.github/ISSUE_TEMPLATE/50_build-issue.md
index a358575cd7c..9b05fbbdd13 100644
--- a/.github/ISSUE_TEMPLATE/50_build-issue.md
+++ b/.github/ISSUE_TEMPLATE/50_build-issue.md
@@ -7,7 +7,7 @@ assignees: ''
---
-> Make sure that `git diff` result is empty and you've just pulled fresh master. Try cleaning up cmake cache. Just in case, official build instructions are published here: https://clickhouse.yandex/docs/en/development/build/
+> Make sure that `git diff` result is empty and you've just pulled fresh master. Try cleaning up cmake cache. Just in case, official build instructions are published here: https://clickhouse.com/docs/en/development/build/
**Operating system**
diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml
index a172947b2fc..5b47f94a324 100644
--- a/.github/workflows/nightly.yml
+++ b/.github/workflows/nightly.yml
@@ -7,6 +7,7 @@ env:
"on":
schedule:
- cron: '13 3 * * *'
+ workflow_dispatch:
jobs:
DockerHubPushAarch64:
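
The added `workflow_dispatch:` trigger means the nightly pipeline can now be started by hand in addition to the 03:13 cron schedule. A minimal dispatch sketch with the GitHub CLI (the ref is an example; any branch containing this workflow file works):

    # Start the nightly workflow without waiting for the scheduled run.
    gh workflow run nightly.yml --ref master

    # Inspect the run that was just dispatched.
    gh run list --workflow=nightly.yml --limit 1
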
diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml
index 4a3880543c4..d50a2151f2f 100644
--- a/.github/workflows/pull_request.yml
+++ b/.github/workflows/pull_request.yml
@@ -1733,6 +1733,51 @@ jobs:
docker kill "$(docker ps -q)" ||:
docker rm -f "$(docker ps -a -q)" ||:
sudo rm -fr "$TEMP_PATH"
+ TestsBugfixCheck:
+ runs-on: [self-hosted, stress-tester]
+ steps:
+ - name: Set envs
+ run: |
+ cat >> "$GITHUB_ENV" << 'EOF'
+ TEMP_PATH=${{runner.temp}}/tests_bugfix_check
+ REPORTS_PATH=${{runner.temp}}/reports_dir
+ CHECK_NAME=Tests bugfix validate check (actions)
+ KILL_TIMEOUT=3600
+ REPO_COPY=${{runner.temp}}/tests_bugfix_check/ClickHouse
+ EOF
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{ env.REPORTS_PATH }}
+ - name: Clear repository
+ run: |
+ sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Bugfix test
+ run: |
+ sudo rm -fr "$TEMP_PATH"
+ mkdir -p "$TEMP_PATH"
+ cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
+ cd "$REPO_COPY/tests/ci"
+
+ TEMP_PATH="${TEMP_PATH}/integration" \
+ REPORTS_PATH="${REPORTS_PATH}/integration" \
+ python3 integration_test_check.py "Integration tests bugfix validate check" \
+ --validate-bugfix --post-commit-status=file || echo 'ignore exit code'
+
+ TEMP_PATH="${TEMP_PATH}/stateless" \
+ REPORTS_PATH="${REPORTS_PATH}/stateless" \
+ python3 functional_test_check.py "Stateless tests bugfix validate check" "$KILL_TIMEOUT" \
+ --validate-bugfix --post-commit-status=file || echo 'ignore exit code'
+
+ python3 bugfix_validate_check.py "${TEMP_PATH}/stateless/post_commit_status.tsv" "${TEMP_PATH}/integration/post_commit_status.tsv"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill "$(docker ps -q)" ||:
+ docker rm -f "$(docker ps -a -q)" ||:
+ sudo rm -fr "$TEMP_PATH"
##############################################################################################
############################ FUNCTIONAL STATEFUL TESTS #######################################
##############################################################################################
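
The new TestsBugfixCheck job deliberately ignores the exit codes of both test runs: when validating a bugfix, the checks are expected to fail on a tree that lacks the fix, so each run only records its outcome in a post_commit_status.tsv file and the final bugfix_validate_check.py call decides the overall status. A small sketch of the shell idiom involved (the function name is invented for illustration):

    set -e
    flaky_step() { return 1; }

    # '|| echo ...' keeps a failing step from aborting a 'set -e' script
    # while still leaving a trace in the log.
    flaky_step || echo 'ignore exit code'

    # '||:' (used in the Cleanup steps) swallows the failure silently.
    flaky_step ||:

    echo "the aggregation step still runs"
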
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 02e15584d2e..61724ab2d0c 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,4 +1,11 @@
-### ClickHouse release v22.3-lts, 2022-03-17
+### Table of Contents
+**[ClickHouse release v22.3-lts, 2022-03-17](#223)**<br/>
+**[ClickHouse release v22.2, 2022-02-17](#222)**<br/>
+**[ClickHouse release v22.1, 2022-01-18](#221)**<br/>
+**[Changelog for 2021](https://github.com/ClickHouse/ClickHouse/blob/master/docs/en/whats-new/changelog/2021.md)**
+
+
+## <a id="223"></a> ClickHouse release v22.3-lts, 2022-03-17
#### Backward Incompatible Change
@@ -125,7 +132,7 @@
* Fix inconsistency of `max_query_size` limitation in distributed subqueries. [#34078](https://github.com/ClickHouse/ClickHouse/pull/34078) ([Chao Ma](https://github.com/godliness)).
-### ClickHouse release v22.2, 2022-02-17
+### <a id="222"></a> ClickHouse release v22.2, 2022-02-17
#### Upgrade Notes
@@ -301,7 +308,7 @@
* This PR allows using multiple LDAP storages in the same list of user directories. It worked earlier but was broken because LDAP tests are disabled (they are part of the testflows tests). [#33574](https://github.com/ClickHouse/ClickHouse/pull/33574) ([Vitaly Baranov](https://github.com/vitlibar)).
-### ClickHouse release v22.1, 2022-01-18
+### <a id="221"></a> ClickHouse release v22.1, 2022-01-18
#### Upgrade Notes
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 9649fc32d74..7ed3872fd6e 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -267,7 +267,10 @@ endif ()
# Allows to build stripped binary in a separate directory
if (OBJCOPY_PATH AND READELF_PATH)
- set(BUILD_STRIPPED_BINARIES_PREFIX "" CACHE STRING "Build stripped binaries with debug info in separate directory")
+ option(INSTALL_STRIPPED_BINARIES "Build stripped binaries with debug info in separate directory" OFF)
+ if (INSTALL_STRIPPED_BINARIES)
+ set(STRIPPED_BINARIES_OUTPUT "stripped" CACHE STRING "A separate directory for stripped information")
+ endif()
endif()
cmake_host_system_information(RESULT AVAILABLE_PHYSICAL_MEMORY QUERY AVAILABLE_PHYSICAL_MEMORY) # Not available under freebsd
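
Replacing the `BUILD_STRIPPED_BINARIES_PREFIX` cache string with an `option()` turns the feature into a plain boolean toggle, and the output subdirectory now defaults to "stripped" only when the option is enabled. A configure sketch under those assumptions (the build directory name is arbitrary):

    # Configure with stripped-binary installation enabled.
    mkdir -p build && cd build
    cmake -DINSTALL_STRIPPED_BINARIES=ON ..

    # The output subdirectory can still be overridden via the cache variable.
    cmake -DINSTALL_STRIPPED_BINARIES=ON -DSTRIPPED_BINARIES_OUTPUT=stripped ..
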
diff --git a/benchmark/greenplum/result_parser.py b/benchmark/greenplum/result_parser.py
index 8af20d265a0..4ed1aa5c4a5 100755
--- a/benchmark/greenplum/result_parser.py
+++ b/benchmark/greenplum/result_parser.py
@@ -4,11 +4,12 @@
import sys
import json
+
def parse_block(block=[], options=[]):
- #print('block is here', block)
- #show_query = False
- #show_query = options.show_query
+ # print('block is here', block)
+ # show_query = False
+ # show_query = options.show_query
result = []
query = block[0].strip()
if len(block) > 4:
@@ -20,9 +21,9 @@ def parse_block(block=[], options=[]):
timing2 = block[2].strip().split()[1]
timing3 = block[3].strip().split()[1]
if options.show_queries:
- result.append( query )
+ result.append(query)
if not options.show_first_timings:
- result += [ timing1 , timing2, timing3 ]
+ result += [timing1, timing2, timing3]
else:
result.append(timing1)
return result
@@ -37,12 +38,12 @@ def read_stats_file(options, fname):
for line in f.readlines():
- if 'SELECT' in line:
+ if "SELECT" in line:
if len(block) > 1:
- result.append( parse_block(block, options) )
- block = [ line ]
- elif 'Time:' in line:
- block.append( line )
+ result.append(parse_block(block, options))
+ block = [line]
+ elif "Time:" in line:
+ block.append(line)
return result
@@ -50,7 +51,7 @@ def read_stats_file(options, fname):
def compare_stats_files(options, arguments):
result = []
file_output = []
- pyplot_colors = ['y', 'b', 'g', 'r']
+ pyplot_colors = ["y", "b", "g", "r"]
for fname in arguments[1:]:
file_output.append((read_stats_file(options, fname)))
if len(file_output[0]) > 0:
@@ -58,65 +59,92 @@ def compare_stats_files(options, arguments):
for idx, data_set in enumerate(file_output):
int_result = []
for timing in data_set:
- int_result.append(float(timing[0])) #y values
- result.append([[x for x in range(0, len(int_result)) ], int_result,
-pyplot_colors[idx] + '^' ] )
-# result.append([x for x in range(1, len(int_result)) ]) #x values
-# result.append( pyplot_colors[idx] + '^' )
+ int_result.append(float(timing[0])) # y values
+ result.append(
+ [
+ [x for x in range(0, len(int_result))],
+ int_result,
+ pyplot_colors[idx] + "^",
+ ]
+ )
+ # result.append([x for x in range(1, len(int_result)) ]) #x values
+ # result.append( pyplot_colors[idx] + '^' )
return result
+
def parse_args():
from optparse import OptionParser
- parser = OptionParser(usage='usage: %prog [options] [result_file_path]..')
- parser.add_option("-q", "--show-queries", help="Show statements along with timings", action="store_true", dest="show_queries")
- parser.add_option("-f", "--show-first-timings", help="Show only first tries timings", action="store_true", dest="show_first_timings")
- parser.add_option("-c", "--compare-mode", help="Prepare output for pyplot comparing result files.", action="store", dest="compare_mode")
+
+ parser = OptionParser(usage="usage: %prog [options] [result_file_path]..")
+ parser.add_option(
+ "-q",
+ "--show-queries",
+ help="Show statements along with timings",
+ action="store_true",
+ dest="show_queries",
+ )
+ parser.add_option(
+ "-f",
+ "--show-first-timings",
+ help="Show only first tries timings",
+ action="store_true",
+ dest="show_first_timings",
+ )
+ parser.add_option(
+ "-c",
+ "--compare-mode",
+ help="Prepare output for pyplot comparing result files.",
+ action="store",
+ dest="compare_mode",
+ )
(options, arguments) = parser.parse_args(sys.argv)
if len(arguments) < 2:
parser.print_usage()
sys.exit(1)
- return ( options, arguments )
+ return (options, arguments)
+
def gen_pyplot_code(options, arguments):
- result = ''
+ result = ""
data_sets = compare_stats_files(options, arguments)
for idx, data_set in enumerate(data_sets, start=0):
x_values, y_values, line_style = data_set
- result += '\nplt.plot('
- result += '%s, %s, \'%s\'' % ( x_values, y_values, line_style )
- result += ', label=\'%s try\')' % idx
- print('import matplotlib.pyplot as plt')
+ result += "\nplt.plot("
+ result += "%s, %s, '%s'" % (x_values, y_values, line_style)
+ result += ", label='%s try')" % idx
+ print("import matplotlib.pyplot as plt")
print(result)
- print( 'plt.xlabel(\'Try number\')' )
- print( 'plt.ylabel(\'Timing\')' )
- print( 'plt.title(\'Benchmark query timings\')' )
- print('plt.legend()')
- print('plt.show()')
+ print("plt.xlabel('Try number')")
+ print("plt.ylabel('Timing')")
+ print("plt.title('Benchmark query timings')")
+ print("plt.legend()")
+ print("plt.show()")
def gen_html_json(options, arguments):
tuples = read_stats_file(options, arguments[1])
- print('{')
+ print("{")
print('"system: GreenPlum(x2),')
- print(('"version": "%s",' % '4.3.9.1'))
+ print(('"version": "%s",' % "4.3.9.1"))
print('"data_size": 10000000,')
print('"time": "",')
print('"comments": "",')
print('"result":')
- print('[')
+ print("[")
for s in tuples:
print(s)
- print(']')
- print('}')
+ print("]")
+ print("}")
def main():
- ( options, arguments ) = parse_args()
+ (options, arguments) = parse_args()
if len(arguments) > 2:
gen_pyplot_code(options, arguments)
else:
gen_html_json(options, arguments)
-if __name__ == '__main__':
+
+if __name__ == "__main__":
main()
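
The changes above are purely cosmetic (black-style quoting and call layout), so the script's command line is unchanged. A hedged usage sketch; the result file names are placeholders:

    # One result file: print the JSON fragment for the benchmark page.
    ./result_parser.py greenplum_results.txt

    # Show the SELECT statements next to their timings.
    ./result_parser.py -q greenplum_results.txt

    # Two or more result files: emit matplotlib code comparing them;
    # pipe into python3 to draw, assuming matplotlib is installed.
    ./result_parser.py results_a.txt results_b.txt | python3
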
diff --git a/cmake/strip.sh b/cmake/strip.sh
index de596887159..f85d82fab31 100755
--- a/cmake/strip.sh
+++ b/cmake/strip.sh
@@ -1,15 +1,14 @@
#!/usr/bin/env bash
BINARY_PATH=$1
-BINARY_NAME=$(basename $BINARY_PATH)
+BINARY_NAME=$(basename "$BINARY_PATH")
DESTINATION_STRIPPED_DIR=$2
OBJCOPY_PATH=${3:-objcopy}
READELF_PATH=${4:-readelf}
-BUILD_ID=$($READELF_PATH -n $1 | sed -n '/Build ID/ { s/.*: //p; q; }')
+BUILD_ID=$($READELF_PATH -n "$1" | sed -n '/Build ID/ { s/.*: //p; q; }')
BUILD_ID_PREFIX=${BUILD_ID:0:2}
BUILD_ID_SUFFIX=${BUILD_ID:2}
-TEMP_BINARY_PATH="${BINARY_PATH}_temp"
DESTINATION_DEBUG_INFO_DIR="$DESTINATION_STRIPPED_DIR/lib/debug/.build-id"
DESTINATION_STRIP_BINARY_DIR="$DESTINATION_STRIPPED_DIR/bin"
@@ -17,9 +16,13 @@ DESTINATION_STRIP_BINARY_DIR="$DESTINATION_STRIPPED_DIR/bin"
mkdir -p "$DESTINATION_DEBUG_INFO_DIR/$BUILD_ID_PREFIX"
mkdir -p "$DESTINATION_STRIP_BINARY_DIR"
-$OBJCOPY_PATH --only-keep-debug "$BINARY_PATH" "$DESTINATION_DEBUG_INFO_DIR/$BUILD_ID_PREFIX/$BUILD_ID_SUFFIX.debug"
-touch "$TEMP_BINARY_PATH"
-$OBJCOPY_PATH --add-gnu-debuglink "$DESTINATION_DEBUG_INFO_DIR/$BUILD_ID_PREFIX/$BUILD_ID_SUFFIX.debug" "$BINARY_PATH" "$TEMP_BINARY_PATH"
-$OBJCOPY_PATH --strip-all "$TEMP_BINARY_PATH" "$DESTINATION_STRIP_BINARY_DIR/$BINARY_NAME"
-rm -f "$TEMP_BINARY_PATH"
+cp "$BINARY_PATH" "$DESTINATION_STRIP_BINARY_DIR/$BINARY_NAME"
+
+$OBJCOPY_PATH --only-keep-debug --compress-debug-sections "$DESTINATION_STRIP_BINARY_DIR/$BINARY_NAME" "$DESTINATION_DEBUG_INFO_DIR/$BUILD_ID_PREFIX/$BUILD_ID_SUFFIX.debug"
+chmod 0644 "$DESTINATION_DEBUG_INFO_DIR/$BUILD_ID_PREFIX/$BUILD_ID_SUFFIX.debug"
+chown 0:0 "$DESTINATION_DEBUG_INFO_DIR/$BUILD_ID_PREFIX/$BUILD_ID_SUFFIX.debug"
+
+strip --remove-section=.comment --remove-section=.note "$DESTINATION_STRIP_BINARY_DIR/$BINARY_NAME"
+
+$OBJCOPY_PATH --add-gnu-debuglink "$DESTINATION_DEBUG_INFO_DIR/$BUILD_ID_PREFIX/$BUILD_ID_SUFFIX.debug" "$DESTINATION_STRIP_BINARY_DIR/$BINARY_NAME"
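
The rewritten script copies the binary into the output tree first, extracts compressed debug info into the lib/debug/.build-id/<xx>/<rest>.debug layout that debuggers search by GNU build ID, strips the copy in place, and finally records the association with --add-gnu-debuglink. A verification sketch, assuming the script was invoked as `cmake/strip.sh ./clickhouse /tmp/out objcopy readelf`:

    # The stripped binary should carry a .gnu_debuglink section that
    # names the detached .debug file.
    readelf -x .gnu_debuglink /tmp/out/bin/clickhouse

    # gdb can be pointed at the detached debug info explicitly.
    gdb -q \
        -ex 'set debug-file-directory /tmp/out/lib/debug' \
        -ex 'file /tmp/out/bin/clickhouse' \
        -ex 'quit'
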
diff --git a/docker/packager/binary/Dockerfile b/docker/packager/binary/Dockerfile
index e3e2e689b17..a57a734e3df 100644
--- a/docker/packager/binary/Dockerfile
+++ b/docker/packager/binary/Dockerfile
@@ -95,6 +95,14 @@ RUN add-apt-repository ppa:ubuntu-toolchain-r/test --yes \
&& apt-get install gcc-11 g++-11 --yes \
&& apt-get clean
+# Architecture of the image when BuildKit/buildx is used
+ARG TARGETARCH
+ARG NFPM_VERSION=2.15.0
+
+RUN arch=${TARGETARCH:-amd64} \
+ && curl -Lo /tmp/nfpm.deb "https://github.com/goreleaser/nfpm/releases/download/v${NFPM_VERSION}/nfpm_${arch}.deb" \
+ && dpkg -i /tmp/nfpm.deb \
+ && rm /tmp/nfpm.deb
COPY build.sh /
-CMD ["bash", "-c", "/build.sh 2>&1 | ts"]
+CMD ["bash", "-c", "/build.sh 2>&1"]
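
`TARGETARCH` is populated automatically only when the image is built through BuildKit/buildx, which is why the nfpm step falls back to amd64 for classic builds. A build sketch under that assumption (the image tag is an example):

    # buildx derives TARGETARCH from --platform, so the matching
    # nfpm_<arch>.deb is downloaded for each architecture.
    docker buildx build --platform linux/arm64 \
        -t clickhouse/binary-builder:local docker/packager/binary

    # A legacy build leaves TARGETARCH unset and uses the amd64 fallback.
    DOCKER_BUILDKIT=0 docker build \
        -t clickhouse/binary-builder:local docker/packager/binary
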
diff --git a/docker/packager/binary/build.sh b/docker/packager/binary/build.sh
index 2f18b07ffe1..31416e1a0ee 100755
--- a/docker/packager/binary/build.sh
+++ b/docker/packager/binary/build.sh
@@ -1,7 +1,13 @@
#!/usr/bin/env bash
+exec &> >(ts)
set -x -e
+cache_status () {
+ ccache --show-config ||:
+ ccache --show-stats ||:
+}
+
mkdir -p build/cmake/toolchain/darwin-x86_64
tar xJf MacOSX11.0.sdk.tar.xz -C build/cmake/toolchain/darwin-x86_64 --strip-components=1
ln -sf darwin-x86_64 build/cmake/toolchain/darwin-aarch64
@@ -19,15 +25,23 @@ read -ra CMAKE_FLAGS <<< "${CMAKE_FLAGS:-}"
env
cmake --debug-trycompile --verbose=1 -DCMAKE_VERBOSE_MAKEFILE=1 -LA "-DCMAKE_BUILD_TYPE=$BUILD_TYPE" "-DSANITIZE=$SANITIZER" -DENABLE_CHECK_HEAVY_BUILDS=1 "${CMAKE_FLAGS[@]}" ..
-ccache --show-config ||:
-ccache --show-stats ||:
+cache_status
+# clear cache stats
ccache --zero-stats ||:
-# shellcheck disable=SC2086 # No quotes because I want it to expand to nothing if empty.
+# No quotes because I want it to expand to nothing if empty.
+# shellcheck disable=SC2086
ninja $NINJA_FLAGS clickhouse-bundle
-ccache --show-config ||:
-ccache --show-stats ||:
+cache_status
+
+if [ -n "$MAKE_DEB" ]; then
+ rm -rf /build/packages/root
+ # No quotes because I want it to expand to nothing if empty.
+ # shellcheck disable=SC2086
+ DESTDIR=/build/packages/root ninja $NINJA_FLAGS install
+ bash -x /build/packages/build
+fi
mv ./programs/clickhouse* /output
mv ./src/unit_tests_dbms /output ||: # may not exist for some binary builds
@@ -84,8 +98,7 @@ fi
# ../docker/packager/other/fuzzer.sh
# fi
-ccache --show-config ||:
-ccache --show-stats ||:
+cache_status
if [ "${CCACHE_DEBUG:-}" == "1" ]
then
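
The new `exec &> >(ts)` line replaces the `| ts` pipeline dropped from the Dockerfile CMD: it redirects the script's combined stdout and stderr into a process substitution running ts from moreutils, so every later line is timestamped without piping each command individually. A minimal sketch of the idiom:

    #!/usr/bin/env bash
    # Everything printed after this point, on stdout or stderr, is
    # timestamped by 'ts' (requires the moreutils package).
    exec &> >(ts)

    echo "configuring"    # printed as e.g. "Mar 17 03:13:00 configuring"
    echo "warning" >&2    # stderr lines are timestamped too
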
diff --git a/docker/packager/packager b/docker/packager/packager
index 05b2e02df96..a5763273f5f 100755
--- a/docker/packager/packager
+++ b/docker/packager/packager
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-#-*- coding: utf-8 -*-
+# -*- coding: utf-8 -*-
import subprocess
import os
import argparse
@@ -8,36 +8,39 @@ import sys
SCRIPT_PATH = os.path.realpath(__file__)
-IMAGE_MAP = {
- "deb": "clickhouse/deb-builder",
- "binary": "clickhouse/binary-builder",
-}
def check_image_exists_locally(image_name):
try:
- output = subprocess.check_output("docker images -q {} 2> /dev/null".format(image_name), shell=True)
+ output = subprocess.check_output(
+ f"docker images -q {image_name} 2> /dev/null", shell=True
+ )
return output != ""
- except subprocess.CalledProcessError as ex:
+ except subprocess.CalledProcessError:
return False
+
def pull_image(image_name):
try:
- subprocess.check_call("docker pull {}".format(image_name), shell=True)
+ subprocess.check_call(f"docker pull {image_name}", shell=True)
return True
- except subprocess.CalledProcessError as ex:
- logging.info("Cannot pull image {}".format(image_name))
+ except subprocess.CalledProcessError:
+ logging.info(f"Cannot pull image {image_name}".format())
return False
+
def build_image(image_name, filepath):
context = os.path.dirname(filepath)
- build_cmd = "docker build --network=host -t {} -f {} {}".format(image_name, filepath, context)
- logging.info("Will build image with cmd: '{}'".format(build_cmd))
+ build_cmd = f"docker build --network=host -t {image_name} -f {filepath} {context}"
+ logging.info("Will build image with cmd: '%s'", build_cmd)
subprocess.check_call(
build_cmd,
shell=True,
)
-def run_docker_image_with_env(image_name, output, env_variables, ch_root, ccache_dir, docker_image_version):
+
+def run_docker_image_with_env(
+ image_name, output, env_variables, ch_root, ccache_dir, docker_image_version
+):
env_part = " -e ".join(env_variables)
if env_part:
env_part = " -e " + env_part
@@ -47,28 +50,52 @@ def run_docker_image_with_env(image_name, output, env_variables, ch_root, ccache
else:
interactive = ""
- cmd = "docker run --network=host --rm --volume={output_path}:/output --volume={ch_root}:/build --volume={ccache_dir}:/ccache {env} {interactive} {img_name}".format(
- output_path=output,
- ch_root=ch_root,
- ccache_dir=ccache_dir,
- env=env_part,
- img_name=image_name + ":" + docker_image_version,
- interactive=interactive
+ cmd = (
+ f"docker run --network=host --rm --volume={output}:/output "
+ f"--volume={ch_root}:/build --volume={ccache_dir}:/ccache {env_part} "
+ f"{interactive} {image_name}:{docker_image_version}"
)
- logging.info("Will build ClickHouse pkg with cmd: '{}'".format(cmd))
+ logging.info("Will build ClickHouse pkg with cmd: '%s'", cmd)
subprocess.check_call(cmd, shell=True)
-def parse_env_variables(build_type, compiler, sanitizer, package_type, image_type, cache, distcc_hosts, split_binary, clang_tidy, version, author, official, alien_pkgs, with_coverage, with_binaries):
+
+def is_release_build(build_type, package_type, sanitizer, split_binary):
+ return (
+ build_type == ""
+ and package_type == "deb"
+ and sanitizer == ""
+ and not split_binary
+ )
+
+
+def parse_env_variables(
+ build_type,
+ compiler,
+ sanitizer,
+ package_type,
+ image_type,
+ cache,
+ distcc_hosts,
+ split_binary,
+ clang_tidy,
+ version,
+ author,
+ official,
+ additional_pkgs,
+ with_coverage,
+ with_binaries,
+):
DARWIN_SUFFIX = "-darwin"
DARWIN_ARM_SUFFIX = "-darwin-aarch64"
ARM_SUFFIX = "-aarch64"
FREEBSD_SUFFIX = "-freebsd"
- PPC_SUFFIX = '-ppc64le'
+ PPC_SUFFIX = "-ppc64le"
result = []
- cmake_flags = ['$CMAKE_FLAGS']
+ result.append("OUTPUT_DIR=/output")
+ cmake_flags = ["$CMAKE_FLAGS"]
is_cross_darwin = compiler.endswith(DARWIN_SUFFIX)
is_cross_darwin_arm = compiler.endswith(DARWIN_ARM_SUFFIX)
@@ -77,46 +104,72 @@ def parse_env_variables(build_type, compiler, sanitizer, package_type, image_typ
is_cross_freebsd = compiler.endswith(FREEBSD_SUFFIX)
if is_cross_darwin:
- cc = compiler[:-len(DARWIN_SUFFIX)]
+ cc = compiler[: -len(DARWIN_SUFFIX)]
cmake_flags.append("-DCMAKE_AR:FILEPATH=/cctools/bin/x86_64-apple-darwin-ar")
- cmake_flags.append("-DCMAKE_INSTALL_NAME_TOOL=/cctools/bin/x86_64-apple-darwin-install_name_tool")
- cmake_flags.append("-DCMAKE_RANLIB:FILEPATH=/cctools/bin/x86_64-apple-darwin-ranlib")
+ cmake_flags.append(
+ "-DCMAKE_INSTALL_NAME_TOOL=/cctools/bin/"
+ "x86_64-apple-darwin-install_name_tool"
+ )
+ cmake_flags.append(
+ "-DCMAKE_RANLIB:FILEPATH=/cctools/bin/x86_64-apple-darwin-ranlib"
+ )
cmake_flags.append("-DLINKER_NAME=/cctools/bin/x86_64-apple-darwin-ld")
- cmake_flags.append("-DCMAKE_TOOLCHAIN_FILE=/build/cmake/darwin/toolchain-x86_64.cmake")
+ cmake_flags.append(
+ "-DCMAKE_TOOLCHAIN_FILE=/build/cmake/darwin/toolchain-x86_64.cmake"
+ )
elif is_cross_darwin_arm:
- cc = compiler[:-len(DARWIN_ARM_SUFFIX)]
+ cc = compiler[: -len(DARWIN_ARM_SUFFIX)]
cmake_flags.append("-DCMAKE_AR:FILEPATH=/cctools/bin/aarch64-apple-darwin-ar")
- cmake_flags.append("-DCMAKE_INSTALL_NAME_TOOL=/cctools/bin/aarch64-apple-darwin-install_name_tool")
- cmake_flags.append("-DCMAKE_RANLIB:FILEPATH=/cctools/bin/aarch64-apple-darwin-ranlib")
+ cmake_flags.append(
+ "-DCMAKE_INSTALL_NAME_TOOL=/cctools/bin/"
+ "aarch64-apple-darwin-install_name_tool"
+ )
+ cmake_flags.append(
+ "-DCMAKE_RANLIB:FILEPATH=/cctools/bin/aarch64-apple-darwin-ranlib"
+ )
cmake_flags.append("-DLINKER_NAME=/cctools/bin/aarch64-apple-darwin-ld")
- cmake_flags.append("-DCMAKE_TOOLCHAIN_FILE=/build/cmake/darwin/toolchain-aarch64.cmake")
+ cmake_flags.append(
+ "-DCMAKE_TOOLCHAIN_FILE=/build/cmake/darwin/toolchain-aarch64.cmake"
+ )
elif is_cross_arm:
- cc = compiler[:-len(ARM_SUFFIX)]
- cmake_flags.append("-DCMAKE_TOOLCHAIN_FILE=/build/cmake/linux/toolchain-aarch64.cmake")
- result.append("DEB_ARCH_FLAG=-aarm64")
+ cc = compiler[: -len(ARM_SUFFIX)]
+ cmake_flags.append(
+ "-DCMAKE_TOOLCHAIN_FILE=/build/cmake/linux/toolchain-aarch64.cmake"
+ )
+ result.append("DEB_ARCH=arm64")
elif is_cross_freebsd:
- cc = compiler[:-len(FREEBSD_SUFFIX)]
- cmake_flags.append("-DCMAKE_TOOLCHAIN_FILE=/build/cmake/freebsd/toolchain-x86_64.cmake")
+ cc = compiler[: -len(FREEBSD_SUFFIX)]
+ cmake_flags.append(
+ "-DCMAKE_TOOLCHAIN_FILE=/build/cmake/freebsd/toolchain-x86_64.cmake"
+ )
elif is_cross_ppc:
- cc = compiler[:-len(PPC_SUFFIX)]
- cmake_flags.append("-DCMAKE_TOOLCHAIN_FILE=/build/cmake/linux/toolchain-ppc64le.cmake")
+ cc = compiler[: -len(PPC_SUFFIX)]
+ cmake_flags.append(
+ "-DCMAKE_TOOLCHAIN_FILE=/build/cmake/linux/toolchain-ppc64le.cmake"
+ )
else:
cc = compiler
- result.append("DEB_ARCH_FLAG=-aamd64")
+ result.append("DEB_ARCH=amd64")
- cxx = cc.replace('gcc', 'g++').replace('clang', 'clang++')
+ cxx = cc.replace("gcc", "g++").replace("clang", "clang++")
if image_type == "deb":
- result.append("DEB_CC={}".format(cc))
- result.append("DEB_CXX={}".format(cxx))
- # For building fuzzers
- result.append("CC={}".format(cc))
- result.append("CXX={}".format(cxx))
- elif image_type == "binary":
- result.append("CC={}".format(cc))
- result.append("CXX={}".format(cxx))
- cmake_flags.append('-DCMAKE_C_COMPILER=`which {}`'.format(cc))
- cmake_flags.append('-DCMAKE_CXX_COMPILER=`which {}`'.format(cxx))
+ result.append("MAKE_DEB=true")
+ cmake_flags.append("-DENABLE_TESTS=0")
+ cmake_flags.append("-DENABLE_UTILS=0")
+ cmake_flags.append("-DCMAKE_EXPORT_NO_PACKAGE_REGISTRY=ON")
+ cmake_flags.append("-DCMAKE_FIND_PACKAGE_NO_PACKAGE_REGISTRY=ON")
+ cmake_flags.append("-DCMAKE_AUTOGEN_VERBOSE=ON")
+ cmake_flags.append("-DCMAKE_INSTALL_PREFIX=/usr")
+ cmake_flags.append("-DCMAKE_INSTALL_SYSCONFDIR=/etc")
+ cmake_flags.append("-DCMAKE_INSTALL_LOCALSTATEDIR=/var")
+ if is_release_build(build_type, package_type, sanitizer, split_binary):
+ cmake_flags.append("-DINSTALL_STRIPPED_BINARIES=ON")
+
+ result.append(f"CC={cc}")
+ result.append(f"CXX={cxx}")
+ cmake_flags.append(f"-DCMAKE_C_COMPILER={cc}")
+ cmake_flags.append(f"-DCMAKE_CXX_COMPILER={cxx}")
# Create combined output archive for split build and for performance tests.
if package_type == "performance":
@@ -126,12 +179,14 @@ def parse_env_variables(build_type, compiler, sanitizer, package_type, image_typ
result.append("COMBINED_OUTPUT=shared_build")
if sanitizer:
- result.append("SANITIZER={}".format(sanitizer))
+ result.append(f"SANITIZER={sanitizer}")
if build_type:
- result.append("BUILD_TYPE={}".format(build_type))
+ result.append(f"BUILD_TYPE={build_type.capitalize()}")
+ else:
+ result.append("BUILD_TYPE=None")
- if cache == 'distcc':
- result.append("CCACHE_PREFIX={}".format(cache))
+ if cache == "distcc":
+ result.append(f"CCACHE_PREFIX={cache}")
if cache:
result.append("CCACHE_DIR=/ccache")
@@ -142,109 +197,188 @@ def parse_env_variables(build_type, compiler, sanitizer, package_type, image_typ
# result.append("CCACHE_UMASK=777")
if distcc_hosts:
- hosts_with_params = ["{}/24,lzo".format(host) for host in distcc_hosts] + ["localhost/`nproc`"]
- result.append('DISTCC_HOSTS="{}"'.format(" ".join(hosts_with_params)))
+ hosts_with_params = [f"{host}/24,lzo" for host in distcc_hosts] + [
+ "localhost/`nproc`"
+ ]
+ result.append('DISTCC_HOSTS="' + " ".join(hosts_with_params) + '"')
elif cache == "distcc":
- result.append('DISTCC_HOSTS="{}"'.format("localhost/`nproc`"))
+ result.append('DISTCC_HOSTS="localhost/`nproc`"')
- if alien_pkgs:
- result.append("ALIEN_PKGS='" + ' '.join(['--' + pkg for pkg in alien_pkgs]) + "'")
+ if additional_pkgs:
+ result.append("MAKE_APK=true")
+ result.append("MAKE_RPM=true")
+ result.append("MAKE_TGZ=true")
if with_binaries == "programs":
- result.append('BINARY_OUTPUT=programs')
+ result.append("BINARY_OUTPUT=programs")
elif with_binaries == "tests":
- result.append('ENABLE_TESTS=1')
- result.append('BINARY_OUTPUT=tests')
- cmake_flags.append('-DENABLE_TESTS=1')
+ result.append("ENABLE_TESTS=1")
+ result.append("BINARY_OUTPUT=tests")
+ cmake_flags.append("-DENABLE_TESTS=1")
if split_binary:
- cmake_flags.append('-DUSE_STATIC_LIBRARIES=0 -DSPLIT_SHARED_LIBRARIES=1 -DCLICKHOUSE_SPLIT_BINARY=1')
+ cmake_flags.append(
+ "-DUSE_STATIC_LIBRARIES=0 -DSPLIT_SHARED_LIBRARIES=1 "
+ "-DCLICKHOUSE_SPLIT_BINARY=1"
+ )
# We can't always build utils because it requires too much space, but
# we have to build them at least in some way in CI. The split build is
# probably the least heavy disk-wise.
- cmake_flags.append('-DENABLE_UTILS=1')
+ cmake_flags.append("-DENABLE_UTILS=1")
if clang_tidy:
- cmake_flags.append('-DENABLE_CLANG_TIDY=1')
- cmake_flags.append('-DENABLE_UTILS=1')
- cmake_flags.append('-DENABLE_TESTS=1')
- cmake_flags.append('-DENABLE_EXAMPLES=1')
+ cmake_flags.append("-DENABLE_CLANG_TIDY=1")
+ cmake_flags.append("-DENABLE_UTILS=1")
+ cmake_flags.append("-DENABLE_TESTS=1")
+ cmake_flags.append("-DENABLE_EXAMPLES=1")
# Don't stop on first error to find more clang-tidy errors in one run.
- result.append('NINJA_FLAGS=-k0')
+ result.append("NINJA_FLAGS=-k0")
if with_coverage:
- cmake_flags.append('-DWITH_COVERAGE=1')
+ cmake_flags.append("-DWITH_COVERAGE=1")
if version:
- result.append("VERSION_STRING='{}'".format(version))
+ result.append(f"VERSION_STRING='{version}'")
if author:
- result.append("AUTHOR='{}'".format(author))
+ result.append(f"AUTHOR='{author}'")
if official:
- cmake_flags.append('-DYANDEX_OFFICIAL_BUILD=1')
+ cmake_flags.append("-DYANDEX_OFFICIAL_BUILD=1")
- result.append('CMAKE_FLAGS="' + ' '.join(cmake_flags) + '"')
+ result.append('CMAKE_FLAGS="' + " ".join(cmake_flags) + '"')
return result
+
if __name__ == "__main__":
- logging.basicConfig(level=logging.INFO, format='%(asctime)s %(message)s')
- parser = argparse.ArgumentParser(description="ClickHouse building script using prebuilt Docker image")
- # 'performance' creates a combined .tgz with server and configs to be used for performance test.
- parser.add_argument("--package-type", choices=['deb', 'binary', 'performance'], required=True)
- parser.add_argument("--clickhouse-repo-path", default=os.path.join(os.path.dirname(os.path.abspath(__file__)), os.pardir, os.pardir))
+ logging.basicConfig(level=logging.INFO, format="%(asctime)s %(message)s")
+ parser = argparse.ArgumentParser(
+ formatter_class=argparse.ArgumentDefaultsHelpFormatter,
+ description="ClickHouse building script using prebuilt Docker image",
+ )
+ # 'performance' creates a combined .tgz with server
+ # and configs to be used for performance test.
+ parser.add_argument(
+ "--package-type",
+ choices=("deb", "binary", "performance"),
+ required=True,
+ help="a build type",
+ )
+ parser.add_argument(
+ "--clickhouse-repo-path",
+ default=os.path.join(
+ os.path.dirname(os.path.abspath(__file__)), os.pardir, os.pardir
+ ),
+ help="ClickHouse git repository",
+ )
parser.add_argument("--output-dir", required=True)
parser.add_argument("--build-type", choices=("debug", ""), default="")
- parser.add_argument("--compiler", choices=("clang-11", "clang-11-darwin", "clang-11-darwin-aarch64", "clang-11-aarch64",
- "clang-12", "clang-12-darwin", "clang-12-darwin-aarch64", "clang-12-aarch64",
- "clang-13", "clang-13-darwin", "clang-13-darwin-aarch64", "clang-13-aarch64", "clang-13-ppc64le",
- "clang-11-freebsd", "clang-12-freebsd", "clang-13-freebsd", "gcc-11"), default="clang-13")
- parser.add_argument("--sanitizer", choices=("address", "thread", "memory", "undefined", ""), default="")
+ parser.add_argument(
+ "--compiler",
+ choices=(
+ "clang-11",
+ "clang-11-darwin",
+ "clang-11-darwin-aarch64",
+ "clang-11-aarch64",
+ "clang-12",
+ "clang-12-darwin",
+ "clang-12-darwin-aarch64",
+ "clang-12-aarch64",
+ "clang-13",
+ "clang-13-darwin",
+ "clang-13-darwin-aarch64",
+ "clang-13-aarch64",
+ "clang-13-ppc64le",
+ "clang-11-freebsd",
+ "clang-12-freebsd",
+ "clang-13-freebsd",
+ "gcc-11",
+ ),
+ default="clang-13",
+ help="a compiler to use",
+ )
+ parser.add_argument(
+ "--sanitizer",
+ choices=("address", "thread", "memory", "undefined", ""),
+ default="",
+ )
parser.add_argument("--split-binary", action="store_true")
parser.add_argument("--clang-tidy", action="store_true")
- parser.add_argument("--cache", choices=("", "ccache", "distcc"), default="")
- parser.add_argument("--ccache_dir", default= os.getenv("HOME", "") + '/.ccache')
+ parser.add_argument("--cache", choices=("ccache", "distcc", ""), default="")
+ parser.add_argument(
+ "--ccache_dir",
+ default=os.getenv("HOME", "") + "/.ccache",
+ help="a directory with ccache",
+ )
parser.add_argument("--distcc-hosts", nargs="+")
parser.add_argument("--force-build-image", action="store_true")
parser.add_argument("--version")
- parser.add_argument("--author", default="clickhouse")
+ parser.add_argument("--author", default="clickhouse", help="a package author")
parser.add_argument("--official", action="store_true")
- parser.add_argument("--alien-pkgs", nargs='+', default=[])
+ parser.add_argument("--additional-pkgs", action="store_true")
parser.add_argument("--with-coverage", action="store_true")
- parser.add_argument("--with-binaries", choices=("programs", "tests", ""), default="")
- parser.add_argument("--docker-image-version", default="latest")
+ parser.add_argument(
+ "--with-binaries", choices=("programs", "tests", ""), default=""
+ )
+ parser.add_argument(
+ "--docker-image-version", default="latest", help="docker image tag to use"
+ )
args = parser.parse_args()
if not os.path.isabs(args.output_dir):
args.output_dir = os.path.abspath(os.path.join(os.getcwd(), args.output_dir))
- image_type = 'binary' if args.package_type == 'performance' else args.package_type
- image_name = IMAGE_MAP[image_type]
+ image_type = "binary" if args.package_type == "performance" else args.package_type
+ image_name = "clickhouse/binary-builder"
if not os.path.isabs(args.clickhouse_repo_path):
ch_root = os.path.abspath(os.path.join(os.getcwd(), args.clickhouse_repo_path))
else:
ch_root = args.clickhouse_repo_path
- if args.alien_pkgs and not image_type == "deb":
- raise Exception("Can add alien packages only in deb build")
+ if args.additional_pkgs and image_type != "deb":
+ raise Exception("Can build additional packages only in deb build")
- if args.with_binaries != "" and not image_type == "deb":
+ if args.with_binaries != "" and image_type != "deb":
raise Exception("Can add additional binaries only in deb build")
if args.with_binaries != "" and image_type == "deb":
- logging.info("Should place {} to output".format(args.with_binaries))
+ logging.info("Should place %s to output", args.with_binaries)
dockerfile = os.path.join(ch_root, "docker/packager", image_type, "Dockerfile")
image_with_version = image_name + ":" + args.docker_image_version
- if image_type != "freebsd" and not check_image_exists_locally(image_name) or args.force_build_image:
+ if (
+ image_type != "freebsd"
+ and not check_image_exists_locally(image_name)
+ or args.force_build_image
+ ):
if not pull_image(image_with_version) or args.force_build_image:
build_image(image_with_version, dockerfile)
env_prepared = parse_env_variables(
- args.build_type, args.compiler, args.sanitizer, args.package_type, image_type,
- args.cache, args.distcc_hosts, args.split_binary, args.clang_tidy,
- args.version, args.author, args.official, args.alien_pkgs, args.with_coverage, args.with_binaries)
+ args.build_type,
+ args.compiler,
+ args.sanitizer,
+ args.package_type,
+ image_type,
+ args.cache,
+ args.distcc_hosts,
+ args.split_binary,
+ args.clang_tidy,
+ args.version,
+ args.author,
+ args.official,
+ args.additional_pkgs,
+ args.with_coverage,
+ args.with_binaries,
+ )
- run_docker_image_with_env(image_name, args.output_dir, env_prepared, ch_root, args.ccache_dir, args.docker_image_version)
- logging.info("Output placed into {}".format(args.output_dir))
+ run_docker_image_with_env(
+ image_name,
+ args.output_dir,
+ env_prepared,
+ ch_root,
+ args.ccache_dir,
+ args.docker_image_version,
+ )
+ logging.info("Output placed into %s", args.output_dir)
diff --git a/docker/test/fuzzer/generate-test-j2.py b/docker/test/fuzzer/generate-test-j2.py
index bcc1bf6bc84..11525163ed8 100755
--- a/docker/test/fuzzer/generate-test-j2.py
+++ b/docker/test/fuzzer/generate-test-j2.py
@@ -11,7 +11,7 @@ def removesuffix(text, suffix):
https://www.python.org/dev/peps/pep-0616/
"""
if suffix and text.endswith(suffix):
- return text[:-len(suffix)]
+ return text[: -len(suffix)]
else:
return text[:]
diff --git a/docker/test/integration/hive_server/http_api_server.py b/docker/test/integration/hive_server/http_api_server.py
index 4818b785c89..8a9d3da4846 100644
--- a/docker/test/integration/hive_server/http_api_server.py
+++ b/docker/test/integration/hive_server/http_api_server.py
@@ -3,55 +3,55 @@ import subprocess
import datetime
from flask import Flask, flash, request, redirect, url_for
+
def run_command(command, wait=False):
print("{} - execute shell command:{}".format(datetime.datetime.now(), command))
lines = []
- p = subprocess.Popen(command,
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT,
- shell=True)
+ p = subprocess.Popen(
+ command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True
+ )
if wait:
- for l in iter(p.stdout.readline, b''):
+ for l in iter(p.stdout.readline, b""):
lines.append(l)
p.poll()
return (lines, p.returncode)
else:
- return(iter(p.stdout.readline, b''), 0)
+ return (iter(p.stdout.readline, b""), 0)
-UPLOAD_FOLDER = './'
-ALLOWED_EXTENSIONS = {'txt', 'sh'}
+UPLOAD_FOLDER = "./"
+ALLOWED_EXTENSIONS = {"txt", "sh"}
app = Flask(__name__)
-app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
+app.config["UPLOAD_FOLDER"] = UPLOAD_FOLDER
-@app.route('/')
+
+@app.route("/")
def hello_world():
- return 'Hello World'
+ return "Hello World"
def allowed_file(filename):
- return '.' in filename and \
- filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS
+ return "." in filename and filename.rsplit(".", 1)[1].lower() in ALLOWED_EXTENSIONS
-@app.route('/upload', methods=['GET', 'POST'])
+@app.route("/upload", methods=["GET", "POST"])
def upload_file():
- if request.method == 'POST':
+ if request.method == "POST":
# check if the post request has the file part
- if 'file' not in request.files:
- flash('No file part')
+ if "file" not in request.files:
+ flash("No file part")
return redirect(request.url)
- file = request.files['file']
+ file = request.files["file"]
# If the user does not select a file, the browser submits an
# empty file without a filename.
- if file.filename == '':
- flash('No selected file')
+ if file.filename == "":
+ flash("No selected file")
return redirect(request.url)
if file and allowed_file(file.filename):
filename = file.filename
- file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
- return redirect(url_for('upload_file', name=filename))
- return '''
+ file.save(os.path.join(app.config["UPLOAD_FOLDER"], filename))
+ return redirect(url_for("upload_file", name=filename))
+ return """
diff --git a/docker/test/performance-comparison/report.py b/docker/test/performance-comparison/report.py
--- a/docker/test/performance-comparison/report.py
+++ b/docker/test/performance-comparison/report.py
-        addSimpleTable('Tested Commits', ['Old', 'New'],
-            [['<pre>{}</pre>'.format(x) for x in
-                [open('left-commit.txt').read(),
-                 open('right-commit.txt').read()]]])
+        addSimpleTable(
+            "Tested Commits",
+            ["Old", "New"],
+            [
+                [
+                    "<pre>{}</pre>".format(x)
+                    for x in [
+                        open("left-commit.txt").read(),
+                        open("right-commit.txt").read(),
+                    ]
+                ]
+            ],
+        )
     except:
         # Don't fail if no commit info -- maybe it's a manual run.
-        report_errors.append(
-            traceback.format_exception_only(
-                *sys.exc_info()[:2])[-1])
+        report_errors.append(traceback.format_exception_only(*sys.exc_info()[:2])[-1])
         pass
+

 def add_report_errors():
     global tables
     global report_errors
     # Add the errors reported by various steps of comparison script
     try:
-        report_errors += [l.strip() for l in open('report/errors.log')]
+        report_errors += [l.strip() for l in open("report/errors.log")]
     except:
-        report_errors.append(
-            traceback.format_exception_only(
-                *sys.exc_info()[:2])[-1])
+        report_errors.append(traceback.format_exception_only(*sys.exc_info()[:2])[-1])
         pass

     if not report_errors:
         return

-    text = tableStart('Errors while Building the Report')
-    text += tableHeader(['Error'])
+    text = tableStart("Errors while Building the Report")
+    text += tableHeader(["Error"])
     for x in report_errors:
         text += tableRow([x])
     text += tableEnd()
     # Insert after Tested Commits
     tables.insert(1, text)
-    errors_explained.append([f'There were some errors while building the report']);
+    errors_explained.append(
+        [
+            f'There were some errors while building the report'
+        ]
+    )
+

 def add_errors_explained():
     if not errors_explained:
         return

     text = ''
-    text += tableStart('Error Summary')
-    text += tableHeader(['Description'])
+    text += tableStart("Error Summary")
+    text += tableHeader(["Description"])
     for row in errors_explained:
         text += tableRow(row)
     text += tableEnd()
@@ -321,59 +364,81 @@ def add_errors_explained():
     tables.insert(1, text)

-if args.report == 'main':
+if args.report == "main":
     print((header_template.format()))

     add_tested_commits()

-
-    run_error_rows = tsvRows('run-errors.tsv')
+    run_error_rows = tsvRows("run-errors.tsv")
     error_tests += len(run_error_rows)
-    addSimpleTable('Run Errors', ['Test', 'Error'], run_error_rows)
+    addSimpleTable("Run Errors", ["Test", "Error"], run_error_rows)
     if run_error_rows:
-        errors_explained.append([f'There were some errors while running the tests']);
+        errors_explained.append(
+            [
+                f'There were some errors while running the tests'
+            ]
+        )

-
-    slow_on_client_rows = tsvRows('report/slow-on-client.tsv')
+    slow_on_client_rows = tsvRows("report/slow-on-client.tsv")
     error_tests += len(slow_on_client_rows)
-    addSimpleTable('Slow on Client',
-        ['Client time, s', 'Server time, s', 'Ratio', 'Test', 'Query'],
-        slow_on_client_rows)
+    addSimpleTable(
+        "Slow on Client",
+        ["Client time, s", "Server time, s", "Ratio", "Test", "Query"],
+        slow_on_client_rows,
+    )
     if slow_on_client_rows:
-        errors_explained.append([f'Some queries are taking noticeable time client-side (missing `FORMAT Null`?)']);
+        errors_explained.append(
+            [
+                f'Some queries are taking noticeable time client-side (missing `FORMAT Null`?)'
+            ]
+        )

-    unmarked_short_rows = tsvRows('report/unexpected-query-duration.tsv')
+    unmarked_short_rows = tsvRows("report/unexpected-query-duration.tsv")
     error_tests += len(unmarked_short_rows)
-    addSimpleTable('Unexpected Query Duration',
-        ['Problem', 'Marked as "short"?', 'Run time, s', 'Test', '#', 'Query'],
-        unmarked_short_rows)
+    addSimpleTable(
+        "Unexpected Query Duration",
+        ["Problem", 'Marked as "short"?', "Run time, s", "Test", "#", "Query"],
+        unmarked_short_rows,
+    )
     if unmarked_short_rows:
-        errors_explained.append([f'Some queries have unexpected duration']);
+        errors_explained.append(
+            [
+                f'Some queries have unexpected duration'
+            ]
+        )

     def add_partial():
-        rows = tsvRows('report/partial-queries-report.tsv')
+        rows = tsvRows("report/partial-queries-report.tsv")
         if not rows:
             return

         global unstable_partial_queries, slow_average_tests, tables
-        text = tableStart('Partial Queries')
-        columns = ['Median time, s', 'Relative time variance', 'Test', '#', 'Query']
+        text = tableStart("Partial Queries")
+        columns = ["Median time, s", "Relative time variance", "Test", "#", "Query"]
         text += tableHeader(columns)
-        attrs = ['' for c in columns]
+        attrs = ["" for c in columns]
         for row in rows:
-            anchor = f'{currentTableAnchor()}.{row[2]}.{row[3]}'
+            anchor = f"{currentTableAnchor()}.{row[2]}.{row[3]}"
             if float(row[1]) > 0.10:
                 attrs[1] = f'style="background: {color_bad}"'
                 unstable_partial_queries += 1
-                errors_explained.append([f'The query no. {row[3]} of test \'{row[2]}\' has excessive variance of run time. Keep it below 10%'])
+                errors_explained.append(
+                    [
+                        f"The query no. {row[3]} of test '{row[2]}' has excessive variance of run time. Keep it below 10%"
+                    ]
+                )
             else:
-                attrs[1] = ''
+                attrs[1] = ""
             if float(row[0]) > allowed_single_run_time:
                 attrs[0] = f'style="background: {color_bad}"'
-                errors_explained.append([f'The query no. {row[3]} of test \'{row[2]}\' is taking too long to run. Keep the run time below {allowed_single_run_time} seconds"'])
+                errors_explained.append(
+                    [
+                        f'The query no. {row[3]} of test \'{row[2]}\' is taking too long to run. Keep the run time below {allowed_single_run_time} seconds"'
+                    ]
+                )
                 slow_average_tests += 1
             else:
-                attrs[0] = ''
+                attrs[0] = ""
             text += tableRow(row, attrs, anchor)
         text += tableEnd()
         tables.append(text)
@@ -381,41 +446,45 @@ if args.report == 'main':

     add_partial()

     def add_changes():
-        rows = tsvRows('report/changed-perf.tsv')
+        rows = tsvRows("report/changed-perf.tsv")
        if not rows:
             return

         global faster_queries, slower_queries, tables

-        text = tableStart('Changes in Performance')
+        text = tableStart("Changes in Performance")
         columns = [
-            'Old, s', # 0
-            'New, s', # 1
-            'Ratio of speedup (-) or slowdown (+)', # 2
-            'Relative difference (new − old) / old', # 3
-            'p < 0.01 threshold', # 4
-            '', # Failed # 5
-            'Test', # 6
-            '#', # 7
-            'Query', # 8
-        ]
-        attrs = ['' for c in columns]
+            "Old, s",  # 0
+            "New, s",  # 1
+            "Ratio of speedup (-) or slowdown (+)",  # 2
+            "Relative difference (new − old) / old",  # 3
+            "p < 0.01 threshold",  # 4
+            "",  # Failed # 5
+            "Test",  # 6
+            "#",  # 7
+            "Query",  # 8
+        ]
+        attrs = ["" for c in columns]
         attrs[5] = None

         text += tableHeader(columns, attrs)

         for row in rows:
-            anchor = f'{currentTableAnchor()}.{row[6]}.{row[7]}'
+            anchor = f"{currentTableAnchor()}.{row[6]}.{row[7]}"
             if int(row[5]):
-                if float(row[3]) < 0.:
+                if float(row[3]) < 0.0:
                     faster_queries += 1
                     attrs[2] = attrs[3] = f'style="background: {color_good}"'
                 else:
                     slower_queries += 1
                     attrs[2] = attrs[3] = f'style="background: {color_bad}"'
-                    errors_explained.append([f'The query no. {row[7]} of test \'{row[6]}\' has slowed down'])
+                    errors_explained.append(
+                        [
+                            f"The query no. {row[7]} of test '{row[6]}' has slowed down"
+                        ]
+                    )
             else:
-                attrs[2] = attrs[3] = ''
+                attrs[2] = attrs[3] = ""

             text += tableRow(row, attrs, anchor)
@@ -427,35 +496,35 @@ if args.report == 'main':

     def add_unstable_queries():
         global unstable_queries, very_unstable_queries, tables

-        unstable_rows = tsvRows('report/unstable-queries.tsv')
+        unstable_rows = tsvRows("report/unstable-queries.tsv")
         if not unstable_rows:
             return

         unstable_queries += len(unstable_rows)

         columns = [
-            'Old, s', #0
-            'New, s', #1
-            'Relative difference (new - old)/old', #2
-            'p < 0.01 threshold', #3
-            '', # Failed #4
-            'Test', #5
-            '#', #6
-            'Query' #7
+            "Old, s",  # 0
+            "New, s",  # 1
+            "Relative difference (new - old)/old",  # 2
+            "p < 0.01 threshold",  # 3
+            "",  # Failed #4
+            "Test",  # 5
+            "#",  # 6
+            "Query",  # 7
         ]
-        attrs = ['' for c in columns]
+        attrs = ["" for c in columns]
         attrs[4] = None

-        text = tableStart('Unstable Queries')
+        text = tableStart("Unstable Queries")
         text += tableHeader(columns, attrs)

         for r in unstable_rows:
-            anchor = f'{currentTableAnchor()}.{r[5]}.{r[6]}'
+            anchor = f"{currentTableAnchor()}.{r[5]}.{r[6]}"
             if int(r[4]):
                 very_unstable_queries += 1
                 attrs[3] = f'style="background: {color_bad}"'
             else:
-                attrs[3] = ''
+                attrs[3] = ""
                 # Just don't add the slightly unstable queries we don't consider
                 # errors. It's not clear what the user should do with them.
                 continue
@@ -470,53 +539,70 @@ if args.report == 'main':

     add_unstable_queries()

-    skipped_tests_rows = tsvRows('analyze/skipped-tests.tsv')
-    addSimpleTable('Skipped Tests', ['Test', 'Reason'], skipped_tests_rows)
+    skipped_tests_rows = tsvRows("analyze/skipped-tests.tsv")
+    addSimpleTable("Skipped Tests", ["Test", "Reason"], skipped_tests_rows)

-    addSimpleTable('Test Performance Changes',
-        ['Test', 'Ratio of speedup (-) or slowdown (+)', 'Queries', 'Total not OK', 'Changed perf', 'Unstable'],
-        tsvRows('report/test-perf-changes.tsv'))
+    addSimpleTable(
+        "Test Performance Changes",
+        [
+            "Test",
+            "Ratio of speedup (-) or slowdown (+)",
+            "Queries",
+            "Total not OK",
+            "Changed perf",
+            "Unstable",
+        ],
+        tsvRows("report/test-perf-changes.tsv"),
+    )

     def add_test_times():
         global slow_average_tests, tables
-        rows = tsvRows('report/test-times.tsv')
+        rows = tsvRows("report/test-times.tsv")
         if not rows:
             return

         columns = [
-            'Test', #0
-            'Wall clock time, entire test, s', #1
-            'Total client time for measured query runs, s', #2
-            'Queries', #3
-            'Longest query, total for measured runs, s', #4
-            'Wall clock time per query, s', #5
-            'Shortest query, total for measured runs, s', #6
-            '', # Runs #7
-        ]
-        attrs = ['' for c in columns]
+            "Test",  # 0
+            "Wall clock time, entire test, s",  # 1
+            "Total client time for measured query runs, s",  # 2
+            "Queries",  # 3
+            "Longest query, total for measured runs, s",  # 4
+            "Wall clock time per query, s",  # 5
+            "Shortest query, total for measured runs, s",  # 6
+            "",  # Runs #7
+        ]
+        attrs = ["" for c in columns]
         attrs[7] = None

-        text = tableStart('Test Times')
+        text = tableStart("Test Times")
         text += tableHeader(columns, attrs)

-        allowed_average_run_time = 3.75 # 60 seconds per test at (7 + 1) * 2 runs
+        allowed_average_run_time = 3.75  # 60 seconds per test at (7 + 1) * 2 runs
         for r in rows:
-            anchor = f'{currentTableAnchor()}.{r[0]}'
+            anchor = f"{currentTableAnchor()}.{r[0]}"
             total_runs = (int(r[7]) + 1) * 2  # one prewarm run, two servers
-            if r[0] != 'Total' and float(r[5]) > allowed_average_run_time * total_runs:
+            if r[0] != "Total" and float(r[5]) > allowed_average_run_time * total_runs:
                 # FIXME should be 15s max -- investigate parallel_insert
                 slow_average_tests += 1
                 attrs[5] = f'style="background: {color_bad}"'
-                errors_explained.append([f'The test \'{r[0]}\' is too slow to run as a whole. Investigate whether the create and fill queries can be sped up'])
+                errors_explained.append(
+                    [
+                        f"The test '{r[0]}' is too slow to run as a whole. Investigate whether the create and fill queries can be sped up"
+                    ]
+                )
             else:
-                attrs[5] = ''
+                attrs[5] = ""

-            if r[0] != 'Total' and float(r[4]) > allowed_single_run_time * total_runs:
+            if r[0] != "Total" and float(r[4]) > allowed_single_run_time * total_runs:
                 slow_average_tests += 1
                 attrs[4] = f'style="background: {color_bad}"'
-                errors_explained.append([f'Some query of the test \'{r[0]}\' is too slow to run. See the all queries report'])
+                errors_explained.append(
+                    [
+                        f"Some query of the test '{r[0]}' is too slow to run. See the all queries report"
+                    ]
+                )
             else:
-                attrs[4] = ''
+                attrs[4] = ""

             text += tableRow(r, attrs, anchor)
@@ -525,10 +611,17 @@ if args.report == 'main':

     add_test_times()

-    addSimpleTable('Metric Changes',
-        ['Metric', 'Old median value', 'New median value',
-        'Relative difference', 'Times difference'],
-        tsvRows('metrics/changes.tsv'))
+    addSimpleTable(
+        "Metric Changes",
+        [
+            "Metric",
+            "Old median value",
+            "New median value",
+            "Relative difference",
+            "Times difference",
+        ],
+        tsvRows("metrics/changes.tsv"),
+    )

     add_report_errors()
     add_errors_explained()
@@ -536,7 +629,8 @@ if args.report == 'main':
     for t in tables:
         print(t)

-    print(f"""
+    print(
+        f"""
All queries
@@ -546,104 +640,111 @@ if args.report == 'main':