Merge pull request #44820 from ClickHouse/break-ci

Modern tools in CI, part 2.
Alexey Milovidov 2023-01-09 18:26:41 +03:00 committed by GitHub
commit bb8cb829f9
25 changed files with 107 additions and 89 deletions
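Across these 25 files the commit applies three recurring substitutions: pigz/gzip compression becomes zstd, .tgz/.gz artifact names become .tar.zst/.zst, and zgrep becomes ripgrep (rg), which works because the logs being searched are plain text at that point. A minimal before/after sketch of the pattern (paths are illustrative, not taken from the diff):

# archives: pigz -> zstd
tar -cv -I pigz -f output.tgz ./output              # before
tar -cv --zstd -f output.tar.zst ./output           # after

# single files and streams: pigz -> zstd
pigz < server.log > server.log.gz                   # before
zstd --threads=0 < server.log > server.log.zst      # after

# searching plain-text logs: zgrep -> rg
zgrep -Fa '<Fatal>' server.log                      # before
rg -Fa '<Fatal>' server.log                         # after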

View File

@@ -107,7 +107,7 @@ jobs:
run: |
curl --form token="${COVERITY_TOKEN}" \
--form email='security+coverity@clickhouse.com' \
--form file="@$TEMP_PATH/$BUILD_NAME/coverity-scan.tgz" \
--form file="@$TEMP_PATH/$BUILD_NAME/coverity-scan.tar.zst" \
--form version="${GITHUB_REF#refs/heads/}-${GITHUB_SHA::6}" \
--form description="Nightly Scan: $(date +'%Y-%m-%dT%H:%M:%S')" \
https://scan.coverity.com/builds?project=ClickHouse%2FClickHouse

View File

@@ -22,7 +22,8 @@ RUN apt-get update && \
build-essential \
libc6 \
libc6-dev \
libc6-dev-arm64-cross && \
libc6-dev-arm64-cross \
zstd && \
apt-get clean
ENV CC=clang-${LLVM_VERSION}

View File

@@ -159,7 +159,7 @@ then
git -C "$PERF_OUTPUT"/ch log -5
(
cd "$PERF_OUTPUT"/..
tar -cv -I pigz -f /output/performance.tgz output
tar -cv --zstd -f /output/performance.tar.zst output
)
fi
@@ -167,15 +167,15 @@ fi
if [ "" != "$COMBINED_OUTPUT" ]
then
prepare_combined_output /output
tar -cv -I pigz -f "$COMBINED_OUTPUT.tgz" /output
tar -cv --zstd -f "$COMBINED_OUTPUT.tar.zst" /output
rm -r /output/*
mv "$COMBINED_OUTPUT.tgz" /output
mv "$COMBINED_OUTPUT.tar.zst" /output
fi
if [ "coverity" == "$COMBINED_OUTPUT" ]
then
tar -cv -I pigz -f "coverity-scan.tgz" cov-int
mv "coverity-scan.tgz" /output
tar -cv --zstd -f "coverity-scan.tar.zst" cov-int
mv "coverity-scan.tar.zst" /output
fi
ccache_status
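A note on threading: tar --zstd invokes zstd with its defaults, which compress on a single thread, whereas the direct zstd calls in these scripts pass --threads=0 to use all cores. If multithreaded compression through tar were wanted (an option, not something this commit does), the program can be named explicitly:

tar -cv -I 'zstd --threads=0' -f /output/performance.tar.zst output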

View File

@@ -17,6 +17,7 @@ RUN apt-get update \
python3-termcolor \
unixodbc \
pv \
zstd \
--yes --no-install-recommends
# Install CMake 3.20+ for Rust compilation

View File

@@ -188,7 +188,7 @@ function build
cp programs/clickhouse "$FASTTEST_OUTPUT/clickhouse"
strip programs/clickhouse -o "$FASTTEST_OUTPUT/clickhouse-stripped"
gzip "$FASTTEST_OUTPUT/clickhouse-stripped"
zstd --threads=0 "$FASTTEST_OUTPUT/clickhouse-stripped"
fi
ccache --show-stats ||:
ccache --evict-older-than 1d ||:
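Unlike gzip, which replaces its input file, zstd keeps the original by default and writes a sibling with a .zst suffix, so the stripped binary stays available next to its compressed copy:

zstd --threads=0 clickhouse-stripped     # writes clickhouse-stripped.zst, keeps the input
# gzip clickhouse-stripped               # would have replaced it with clickhouse-stripped.gz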

View File

@@ -325,8 +325,8 @@ quit
fi
if test -f core.*; then
pigz core.*
mv core.*.gz core.gz
zstd --threads=0 core.*
mv core.*.zst core.zst
fi
dmesg -T | rg -q -F -e 'Out of memory: Killed process' -e 'oom_reaper: reaped process' -e 'oom-kill:constraint=CONSTRAINT_NONE' && echo "OOM in dmesg" ||:
@@ -363,13 +363,13 @@ case "$stage" in
"report")
CORE_LINK=''
if [ -f core.gz ]; then
CORE_LINK='<a href="core.gz">core.gz</a>'
if [ -f core.zst ]; then
CORE_LINK='<a href="core.zst">core.zst</a>'
fi
rg --text -F '<Fatal>' server.log > fatal.log ||:
pigz server.log
zstd --threads=0 server.log
cat > report.html <<EOF ||:
<!DOCTYPE html>
@@ -394,7 +394,7 @@ p.links a { padding: 5px; margin: 3px; background: #FFF; line-height: 2; white-s
<p class="links">
<a href="run.log">run.log</a>
<a href="fuzzer.log">fuzzer.log</a>
<a href="server.log.gz">server.log.gz</a>
<a href="server.log.zst">server.log.zst</a>
<a href="main.log">main.log</a>
${CORE_LINK}
</p>
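The core-dump handling above relies on the same default: zstd --threads=0 core.* writes a <name>.zst next to each matching dump and keeps the originals, and the subsequent mv core.*.zst core.zst assumes a single dump was produced. A usage sketch for inspecting such a core later (binary name assumed):

unzstd core.zst                # restores the file "core"
gdb ./clickhouse core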

View File

@@ -28,8 +28,8 @@ function download
# Historically there were various paths for the performance test package.
# Test all of them.
declare -a urls_to_try=(
"https://s3.amazonaws.com/clickhouse-builds/$left_pr/$left_sha/$BUILD_NAME/performance.tar.zst"
"https://s3.amazonaws.com/clickhouse-builds/$left_pr/$left_sha/$BUILD_NAME/performance.tgz"
"https://s3.amazonaws.com/clickhouse-builds/$left_pr/$left_sha/performance/performance.tgz"
)
for path in "${urls_to_try[@]}"
@@ -45,7 +45,7 @@ function download
# download anything, for example in some manual runs. In this case, SHAs are not set.
if ! [ "$left_sha" = "$right_sha" ]
then
wget -nv -nd -c "$left_path" -O- | tar -C left --no-same-owner --strip-components=1 -zxv &
wget -nv -nd -c "$left_path" -O- | tar -C left --no-same-owner --strip-components=1 --zstd --extract --verbose &
elif [ "$right_sha" != "" ]
then
mkdir left ||:
@@ -60,7 +60,7 @@ function download
>&2 echo "Unknown dataset '$dataset_name'"
exit 1
fi
cd db0 && wget -nv -nd -c "$dataset_path" -O- | tar -xv &
cd db0 && wget -nv -nd -c "$dataset_path" -O- | tar --extract --verbose &
done
mkdir ~/fg ||:
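Because these pipelines feed tar from stdin, the compression format is stated explicitly with --zstd instead of being inferred from a file suffix. A downloaded package can also be checked before extraction (a sketch; the file name matches the URLs above):

zstd -t performance.tar.zst                # verify the zstd frame
tar --zstd -tf performance.tar.zst | head  # list archive contents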

View File

@@ -66,10 +66,8 @@ function find_reference_sha
# test all of them.
unset found
declare -a urls_to_try=(
"https://s3.amazonaws.com/clickhouse-builds/0/$REF_SHA/$BUILD_NAME/performance.tar.zst"
"https://s3.amazonaws.com/clickhouse-builds/0/$REF_SHA/$BUILD_NAME/performance.tgz"
# FIXME: the following link is left there for backward compatibility.
# We should remove it after 2022-11-01
"https://s3.amazonaws.com/clickhouse-builds/0/$REF_SHA/performance/performance.tgz"
)
for path in "${urls_to_try[@]}"
do
@@ -94,13 +92,13 @@ chmod 777 workspace output
cd workspace
# Download the package for the version we are going to test.
if curl_with_retry "$S3_URL/$PR_TO_TEST/$SHA_TO_TEST$COMMON_BUILD_PREFIX/$BUILD_NAME/performance.tgz"
if curl_with_retry "$S3_URL/$PR_TO_TEST/$SHA_TO_TEST$COMMON_BUILD_PREFIX/$BUILD_NAME/performance.tar.zst"
then
right_path="$S3_URL/$PR_TO_TEST/$SHA_TO_TEST$COMMON_BUILD_PREFIX/$BUILD_NAME/performance.tgz"
right_path="$S3_URL/$PR_TO_TEST/$SHA_TO_TEST$COMMON_BUILD_PREFIX/$BUILD_NAME/performance.tar.zst"
fi
mkdir right
wget -nv -nd -c "$right_path" -O- | tar -C right --no-same-owner --strip-components=1 -zxv
wget -nv -nd -c "$right_path" -O- | tar -C right --no-same-owner --strip-components=1 --zstd --extract --verbose
# Find reference revision if not specified explicitly
if [ "$REF_SHA" == "" ]; then find_reference_sha; fi

View File

@@ -154,19 +154,19 @@ fi
rg -Fa "Fatal" /var/log/clickhouse-server/clickhouse-server.log ||:
pigz < /var/log/clickhouse-server/clickhouse-server.log > /test_output/clickhouse-server.log.gz ||:
zstd --threads=0 < /var/log/clickhouse-server/clickhouse-server.log > /test_output/clickhouse-server.log.zst ||:
# FIXME: remove once only GitHub Actions are left
rm /var/log/clickhouse-server/clickhouse-server.log
mv /var/log/clickhouse-server/stderr.log /test_output/ ||:
if [[ -n "$WITH_COVERAGE" ]] && [[ "$WITH_COVERAGE" -eq 1 ]]; then
tar -chf /test_output/clickhouse_coverage.tar.gz /profraw ||:
tar --zstd -c -h -f /test_output/clickhouse_coverage.tar.zst /profraw ||:
fi
if [[ -n "$USE_DATABASE_REPLICATED" ]] && [[ "$USE_DATABASE_REPLICATED" -eq 1 ]]; then
rg -Fa "Fatal" /var/log/clickhouse-server/clickhouse-server1.log ||:
rg -Fa "Fatal" /var/log/clickhouse-server/clickhouse-server2.log ||:
pigz < /var/log/clickhouse-server/clickhouse-server1.log > /test_output/clickhouse-server1.log.gz ||:
pigz < /var/log/clickhouse-server/clickhouse-server2.log > /test_output/clickhouse-server2.log.gz ||:
zstd --threads=0 < /var/log/clickhouse-server/clickhouse-server1.log > /test_output/clickhouse-server1.log.zst ||:
zstd --threads=0 < /var/log/clickhouse-server/clickhouse-server2.log > /test_output/clickhouse-server2.log.zst ||:
# FIXME: remove once only GitHub Actions are left
rm /var/log/clickhouse-server/clickhouse-server1.log
rm /var/log/clickhouse-server/clickhouse-server2.log
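The resulting .zst logs are single-file streams, so they can be inspected without decompressing to disk (a usage sketch; names follow the script above):

zstd -dc /test_output/clickhouse-server.log.zst | rg -Fa '<Fatal>' | head
zstdcat /test_output/clickhouse-server1.log.zst | less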

View File

@@ -168,7 +168,7 @@ if [[ -n "$USE_DATABASE_REPLICATED" ]] && [[ "$USE_DATABASE_REPLICATED" -eq 1 ]]
fi
rg -Fa "Fatal" /var/log/clickhouse-server/clickhouse-server.log ||:
pigz < /var/log/clickhouse-server/clickhouse-server.log > /test_output/clickhouse-server.log.gz &
zstd --threads=0 < /var/log/clickhouse-server/clickhouse-server.log > /test_output/clickhouse-server.log.zst &
# Compress tables.
#
@@ -179,10 +179,10 @@ pigz < /var/log/clickhouse-server/clickhouse-server.log > /test_output/clickhous
# for files >64MB, we want these files to be compressed explicitly
for table in query_log zookeeper_log trace_log transactions_info_log
do
clickhouse-local --path /var/lib/clickhouse/ --only-system-tables -q "select * from system.$table format TSVWithNamesAndTypes" | pigz > /test_output/$table.tsv.gz ||:
clickhouse-local --path /var/lib/clickhouse/ --only-system-tables -q "select * from system.$table format TSVWithNamesAndTypes" | zstd --threads=0 > /test_output/$table.tsv.zst ||:
if [[ -n "$USE_DATABASE_REPLICATED" ]] && [[ "$USE_DATABASE_REPLICATED" -eq 1 ]]; then
clickhouse-local --path /var/lib/clickhouse1/ --only-system-tables -q "select * from system.$table format TSVWithNamesAndTypes" | pigz > /test_output/$table.1.tsv.gz ||:
clickhouse-local --path /var/lib/clickhouse2/ --only-system-tables -q "select * from system.$table format TSVWithNamesAndTypes" | pigz > /test_output/$table.2.tsv.gz ||:
clickhouse-local --path /var/lib/clickhouse1/ --only-system-tables -q "select * from system.$table format TSVWithNamesAndTypes" | zstd --threads=0 > /test_output/$table.1.tsv.zst ||:
clickhouse-local --path /var/lib/clickhouse2/ --only-system-tables -q "select * from system.$table format TSVWithNamesAndTypes" | zstd --threads=0 > /test_output/$table.2.tsv.zst ||:
fi
done
@@ -199,7 +199,7 @@ do
order by samples desc
settings allow_introspection_functions = 1
format TabSeparated" \
| pigz > "/test_output/trace-log-$trace_type-flamegraph.tsv.gz" ||:
| zstd --threads=0 > "/test_output/trace-log-$trace_type-flamegraph.tsv.zst" ||:
done
@@ -207,7 +207,7 @@ done
rm /var/log/clickhouse-server/clickhouse-server.log
mv /var/log/clickhouse-server/stderr.log /test_output/ ||:
if [[ -n "$WITH_COVERAGE" ]] && [[ "$WITH_COVERAGE" -eq 1 ]]; then
tar -chf /test_output/clickhouse_coverage.tar.gz /profraw ||:
tar --zstd -chf /test_output/clickhouse_coverage.tar.zst /profraw ||:
fi
tar -chf /test_output/coordination.tar /var/lib/clickhouse/coordination ||:
@@ -215,8 +215,8 @@ tar -chf /test_output/coordination.tar /var/lib/clickhouse/coordination ||:
if [[ -n "$USE_DATABASE_REPLICATED" ]] && [[ "$USE_DATABASE_REPLICATED" -eq 1 ]]; then
rg -Fa "Fatal" /var/log/clickhouse-server/clickhouse-server1.log ||:
rg -Fa "Fatal" /var/log/clickhouse-server/clickhouse-server2.log ||:
pigz < /var/log/clickhouse-server/clickhouse-server1.log > /test_output/clickhouse-server1.log.gz ||:
pigz < /var/log/clickhouse-server/clickhouse-server2.log > /test_output/clickhouse-server2.log.gz ||:
zstd --threads=0 < /var/log/clickhouse-server/clickhouse-server1.log > /test_output/clickhouse-server1.log.zst ||:
zstd --threads=0 < /var/log/clickhouse-server/clickhouse-server2.log > /test_output/clickhouse-server2.log.zst ||:
# FIXME: remove once only GitHub Actions are left
rm /var/log/clickhouse-server/clickhouse-server1.log
rm /var/log/clickhouse-server/clickhouse-server2.log

View File

@@ -22,7 +22,7 @@ Still alive
2018-10-22 13:49:16,195 Stress is ok
2018-10-22 13:49:16,195 Copying server log files
$ ls $HOME/test_result
clickhouse-server.err.log clickhouse-server.log.0.gz stderr.log stress_test_run_0.txt stress_test_run_11.txt stress_test_run_13.txt
clickhouse-server.err.log clickhouse-server.log.0.zst stderr.log stress_test_run_0.txt stress_test_run_11.txt stress_test_run_13.txt
stress_test_run_15.txt stress_test_run_2.txt stress_test_run_4.txt stress_test_run_6.txt stress_test_run_8.txt clickhouse-server.log
perf_stress_run.txt stdout.log stress_test_run_10.txt stress_test_run_12.txt
stress_test_run_14.txt stress_test_run_1.txt

View File

@@ -315,18 +315,18 @@ stop
# Sanitizer asserts
rg -Fa "==================" /var/log/clickhouse-server/stderr.log | rg -v "in query:" >> /test_output/tmp
rg -Fa "WARNING" /var/log/clickhouse-server/stderr.log >> /test_output/tmp
zgrep -Fav -e "ASan doesn't fully support makecontext/swapcontext functions" -e "DB::Exception" /test_output/tmp > /dev/null \
rg -Fav -e "ASan doesn't fully support makecontext/swapcontext functions" -e "DB::Exception" /test_output/tmp > /dev/null \
&& echo -e 'Sanitizer assert (in stderr.log)\tFAIL' >> /test_output/test_results.tsv \
|| echo -e 'No sanitizer asserts\tOK' >> /test_output/test_results.tsv
rm -f /test_output/tmp
# OOM
zgrep -Fa " <Fatal> Application: Child process was terminated by signal 9" /var/log/clickhouse-server/clickhouse-server*.log > /dev/null \
rg -Fa " <Fatal> Application: Child process was terminated by signal 9" /var/log/clickhouse-server/clickhouse-server*.log > /dev/null \
&& echo -e 'OOM killer (or signal 9) in clickhouse-server.log\tFAIL' >> /test_output/test_results.tsv \
|| echo -e 'No OOM messages in clickhouse-server.log\tOK' >> /test_output/test_results.tsv
# Logical errors
zgrep -Fa "Code: 49, e.displayText() = DB::Exception:" /var/log/clickhouse-server/clickhouse-server*.log > /test_output/logical_errors.txt \
rg -Fa "Code: 49, e.displayText() = DB::Exception:" /var/log/clickhouse-server/clickhouse-server*.log > /test_output/logical_errors.txt \
&& echo -e 'Logical error thrown (see clickhouse-server.log or logical_errors.txt)\tFAIL' >> /test_output/test_results.tsv \
|| echo -e 'No logical errors\tOK' >> /test_output/test_results.tsv
@@ -334,7 +334,7 @@ zgrep -Fa "Code: 49, e.displayText() = DB::Exception:" /var/log/clickhouse-serve
[ -s /test_output/logical_errors.txt ] || rm /test_output/logical_errors.txt
# No such key errors
zgrep -Ea "Code: 499.*The specified key does not exist" /var/log/clickhouse-server/clickhouse-server*.log > /test_output/no_such_key_errors.txt \
rg -Ea "Code: 499.*The specified key does not exist" /var/log/clickhouse-server/clickhouse-server*.log > /test_output/no_such_key_errors.txt \
&& echo -e 'S3_ERROR No such key thrown (see clickhouse-server.log or no_such_key_errors.txt)\tFAIL' >> /test_output/test_results.tsv \
|| echo -e 'No lost s3 keys\tOK' >> /test_output/test_results.tsv
@@ -342,22 +342,22 @@ zgrep -Ea "Code: 499.*The specified key does not exist" /var/log/clickhouse-serv
[ -s /test_output/no_such_key_errors.txt ] || rm /test_output/no_such_key_errors.txt
# Crash
zgrep -Fa "########################################" /var/log/clickhouse-server/clickhouse-server*.log > /dev/null \
rg -Fa "########################################" /var/log/clickhouse-server/clickhouse-server*.log > /dev/null \
&& echo -e 'Killed by signal (in clickhouse-server.log)\tFAIL' >> /test_output/test_results.tsv \
|| echo -e 'Not crashed\tOK' >> /test_output/test_results.tsv
# It also checks for crash without stacktrace (printed by watchdog)
zgrep -Fa " <Fatal> " /var/log/clickhouse-server/clickhouse-server*.log > /test_output/fatal_messages.txt \
rg -Fa " <Fatal> " /var/log/clickhouse-server/clickhouse-server*.log > /test_output/fatal_messages.txt \
&& echo -e 'Fatal message in clickhouse-server.log (see fatal_messages.txt)\tFAIL' >> /test_output/test_results.tsv \
|| echo -e 'No fatal messages in clickhouse-server.log\tOK' >> /test_output/test_results.tsv
# Remove file fatal_messages.txt if it's empty
[ -s /test_output/fatal_messages.txt ] || rm /test_output/fatal_messages.txt
zgrep -Fa "########################################" /test_output/* > /dev/null \
rg -Fa "########################################" /test_output/* > /dev/null \
&& echo -e 'Killed by signal (output files)\tFAIL' >> /test_output/test_results.tsv
zgrep -Fa " received signal " /test_output/gdb.log > /dev/null \
rg -Fa " received signal " /test_output/gdb.log > /dev/null \
&& echo -e 'Found signal in gdb.log\tFAIL' >> /test_output/test_results.tsv
if [ "$DISABLE_BC_CHECK" -ne "1" ]; then
@@ -379,7 +379,7 @@ if [ "$DISABLE_BC_CHECK" -ne "1" ]; then
mv /var/log/clickhouse-server/clickhouse-server.log /var/log/clickhouse-server/clickhouse-server.clean.log
for table in query_log trace_log
do
clickhouse-local --path /var/lib/clickhouse/ --only-system-tables -q "select * from system.$table format TSVWithNamesAndTypes" | pigz > /test_output/$table.tsv.gz ||:
clickhouse-local --path /var/lib/clickhouse/ --only-system-tables -q "select * from system.$table format TSVWithNamesAndTypes" | zstd --threads=0 > /test_output/$table.tsv.zst ||:
done
tar -chf /test_output/coordination.tar /var/lib/clickhouse/coordination ||:
@@ -497,7 +497,7 @@ if [ "$DISABLE_BC_CHECK" -ne "1" ]; then
# ("This engine is deprecated and is not supported in transactions", "[Queue = DB::MergeMutateRuntimeQueue]: Code: 235. DB::Exception: Part")
# FIXME https://github.com/ClickHouse/ClickHouse/issues/39174 - bad mutation does not indicate backward incompatibility
echo "Check for Error messages in server log:"
zgrep -Fav -e "Code: 236. DB::Exception: Cancelled merging parts" \
rg -Fav -e "Code: 236. DB::Exception: Cancelled merging parts" \
-e "Code: 236. DB::Exception: Cancelled mutating parts" \
-e "REPLICA_IS_ALREADY_ACTIVE" \
-e "REPLICA_ALREADY_EXISTS" \
@@ -533,7 +533,7 @@ if [ "$DISABLE_BC_CHECK" -ne "1" ]; then
-e "MutateFromLogEntryTask" \
-e "No connection to ZooKeeper, cannot get shared table ID" \
-e "Session expired" \
/var/log/clickhouse-server/clickhouse-server.backward.dirty.log | zgrep -Fa "<Error>" > /test_output/bc_check_error_messages.txt \
/var/log/clickhouse-server/clickhouse-server.backward.dirty.log | rg -Fa "<Error>" > /test_output/bc_check_error_messages.txt \
&& echo -e 'Backward compatibility check: Error message in clickhouse-server.log (see bc_check_error_messages.txt)\tFAIL' >> /test_output/test_results.tsv \
|| echo -e 'Backward compatibility check: No Error messages in clickhouse-server.log\tOK' >> /test_output/test_results.tsv
@@ -541,21 +541,21 @@ if [ "$DISABLE_BC_CHECK" -ne "1" ]; then
[ -s /test_output/bc_check_error_messages.txt ] || rm /test_output/bc_check_error_messages.txt
# Sanitizer asserts
zgrep -Fa "==================" /var/log/clickhouse-server/stderr.log >> /test_output/tmp
zgrep -Fa "WARNING" /var/log/clickhouse-server/stderr.log >> /test_output/tmp
zgrep -Fav -e "ASan doesn't fully support makecontext/swapcontext functions" -e "DB::Exception" /test_output/tmp > /dev/null \
rg -Fa "==================" /var/log/clickhouse-server/stderr.log >> /test_output/tmp
rg -Fa "WARNING" /var/log/clickhouse-server/stderr.log >> /test_output/tmp
rg -Fav -e "ASan doesn't fully support makecontext/swapcontext functions" -e "DB::Exception" /test_output/tmp > /dev/null \
&& echo -e 'Backward compatibility check: Sanitizer assert (in stderr.log)\tFAIL' >> /test_output/test_results.tsv \
|| echo -e 'Backward compatibility check: No sanitizer asserts\tOK' >> /test_output/test_results.tsv
rm -f /test_output/tmp
# OOM
zgrep -Fa " <Fatal> Application: Child process was terminated by signal 9" /var/log/clickhouse-server/clickhouse-server.backward.*.log > /dev/null \
rg -Fa " <Fatal> Application: Child process was terminated by signal 9" /var/log/clickhouse-server/clickhouse-server.backward.*.log > /dev/null \
&& echo -e 'Backward compatibility check: OOM killer (or signal 9) in clickhouse-server.log\tFAIL' >> /test_output/test_results.tsv \
|| echo -e 'Backward compatibility check: No OOM messages in clickhouse-server.log\tOK' >> /test_output/test_results.tsv
# Logical errors
echo "Check for Logical errors in server log:"
zgrep -Fa -A20 "Code: 49, e.displayText() = DB::Exception:" /var/log/clickhouse-server/clickhouse-server.backward.*.log > /test_output/bc_check_logical_errors.txt \
rg -Fa -A20 "Code: 49, e.displayText() = DB::Exception:" /var/log/clickhouse-server/clickhouse-server.backward.*.log > /test_output/bc_check_logical_errors.txt \
&& echo -e 'Backward compatibility check: Logical error thrown (see clickhouse-server.log or bc_check_logical_errors.txt)\tFAIL' >> /test_output/test_results.tsv \
|| echo -e 'Backward compatibility check: No logical errors\tOK' >> /test_output/test_results.tsv
@@ -563,13 +563,13 @@ if [ "$DISABLE_BC_CHECK" -ne "1" ]; then
[ -s /test_output/bc_check_logical_errors.txt ] || rm /test_output/bc_check_logical_errors.txt
# Crash
zgrep -Fa "########################################" /var/log/clickhouse-server/clickhouse-server.backward.*.log > /dev/null \
rg -Fa "########################################" /var/log/clickhouse-server/clickhouse-server.backward.*.log > /dev/null \
&& echo -e 'Backward compatibility check: Killed by signal (in clickhouse-server.log)\tFAIL' >> /test_output/test_results.tsv \
|| echo -e 'Backward compatibility check: Not crashed\tOK' >> /test_output/test_results.tsv
# It also checks for crash without stacktrace (printed by watchdog)
echo "Check for Fatal message in server log:"
zgrep -Fa " <Fatal> " /var/log/clickhouse-server/clickhouse-server.backward.*.log > /test_output/bc_check_fatal_messages.txt \
rg -Fa " <Fatal> " /var/log/clickhouse-server/clickhouse-server.backward.*.log > /test_output/bc_check_fatal_messages.txt \
&& echo -e 'Backward compatibility check: Fatal message in clickhouse-server.log (see bc_check_fatal_messages.txt)\tFAIL' >> /test_output/test_results.tsv \
|| echo -e 'Backward compatibility check: No fatal messages in clickhouse-server.log\tOK' >> /test_output/test_results.tsv
@@ -579,7 +579,7 @@ if [ "$DISABLE_BC_CHECK" -ne "1" ]; then
tar -chf /test_output/coordination.backward.tar /var/lib/clickhouse/coordination ||:
for table in query_log trace_log
do
clickhouse-local --path /var/lib/clickhouse/ --only-system-tables -q "select * from system.$table format TSVWithNamesAndTypes" | pigz > /test_output/$table.backward.tsv.gz ||:
clickhouse-local --path /var/lib/clickhouse/ --only-system-tables -q "select * from system.$table format TSVWithNamesAndTypes" | zstd --threads=0 > /test_output/$table.backward.tsv.zst ||:
done
fi
fi
@@ -598,7 +598,7 @@ clickhouse-local --structure "test String, res String" -q "SELECT 'failure', tes
[ -s /test_output/check_status.tsv ] || echo -e "success\tNo errors found" > /test_output/check_status.tsv
# Core dumps
for core in core.*; do
pigz $core
mv $core.gz /test_output/
find . -type f -name 'core.*' | while read core; do
zstd --threads=0 $core
mv $core.zst /test_output/
done
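One behavioral difference behind the zgrep-to-rg swap: zgrep transparently searches gzip-compressed files, while rg reads plain text by default. That is safe here because every file scanned above is still uncompressed when it is searched. Should a compressed log need searching, ripgrep's -z/--search-zip flag decompresses on the fly by shelling out to the zstd binary, which this commit adds to the images:

rg -z -Fa '<Fatal>' /test_output/clickhouse-server.log.zst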

View File

@@ -89,7 +89,7 @@ def run_func_test(
def compress_stress_logs(output_path, files_prefix):
cmd = f"cd {output_path} && tar -zcf stress_run_logs.tar.gz {files_prefix}* && rm {files_prefix}*"
cmd = f"cd {output_path} && tar --zstd --create --file=stress_run_logs.tar.zst {files_prefix}* && rm {files_prefix}*"
check_output(cmd, shell=True)

View File

@@ -129,7 +129,7 @@ Builds ClickHouse in various configurations for use in further steps. You have t
- `clickhouse`: Main built binary.
- `clickhouse-odbc-bridge`
- `unit_tests_dbms`: GoogleTest binary with ClickHouse unit tests.
- `performance.tgz`: Special package for performance tests.
- `performance.tar.zst`: Special package for performance tests.
## Special Build Check

View File

@@ -96,7 +96,7 @@ git push
- `clickhouse`: Main built binary.
- `clickhouse-odbc-bridge`
- `unit_tests_dbms`: GoogleTest binary with ClickHouse unit tests.
- `performance.tgz`: Special package for performance tests.
- `performance.tar.zst`: Special package for performance tests.
## Special Build Check {#special-buildcheck}
Runs static analysis and code style checks using clang-tidy. The report is similar to the build check. Fix the errors found in the build log.

View File

@@ -115,10 +115,10 @@ if __name__ == "__main__":
paths = {
"run.log": run_log_path,
"main.log": os.path.join(workspace_path, "main.log"),
"server.log.gz": os.path.join(workspace_path, "server.log.gz"),
"server.log.zst": os.path.join(workspace_path, "server.log.zst"),
"fuzzer.log": os.path.join(workspace_path, "fuzzer.log"),
"report.html": os.path.join(workspace_path, "report.html"),
"core.gz": os.path.join(workspace_path, "core.gz"),
"core.zst": os.path.join(workspace_path, "core.zst"),
}
s3_helper = S3Helper()

View File

@@ -253,7 +253,7 @@ def main():
s3_path_prefix = "/".join((release_or_pr, pr_info.sha, build_name))
# FIXME performance
s3_performance_path = "/".join(
(performance_pr, pr_info.sha, build_name, "performance.tgz")
(performance_pr, pr_info.sha, build_name, "performance.tar.zst")
)
# If this is rerun, then we try to find already created artifacts and just
@@ -331,13 +331,13 @@
# FIXME performance
performance_urls = []
performance_path = os.path.join(build_output_path, "performance.tgz")
performance_path = os.path.join(build_output_path, "performance.tar.zst")
if os.path.exists(performance_path):
performance_urls.append(
s3_helper.upload_build_file_to_s3(performance_path, s3_performance_path)
)
logging.info(
"Uploaded performance.tgz to %s, now delete to avoid duplication",
"Uploaded performance.tar.zst to %s, now delete to avoid duplication",
performance_urls[0],
)
os.remove(performance_path)

View File

@@ -140,5 +140,8 @@ def download_clickhouse_binary(check_name, reports_path, result_path):
def download_performance_build(check_name, reports_path, result_path):
download_builds_filter(
check_name, reports_path, result_path, lambda x: x.endswith("performance.tgz")
check_name,
reports_path,
result_path,
lambda x: x.endswith("performance.tar.zst"),
)

View File

@@ -59,7 +59,7 @@ def group_by_artifacts(build_urls: List[str]) -> Dict[str, List[str]]:
"performance": [],
} # type: Dict[str, List[str]]
for url in build_urls:
if url.endswith("performance.tgz"):
if url.endswith("performance.tar.zst"):
groups["performance"].append(url)
elif (
url.endswith(".deb")

View File

@@ -116,7 +116,7 @@ def get_ccache_if_not_exists(
def upload_ccache(path_to_ccache_dir, s3_helper, current_pr_number, temp_path):
logging.info("Uploading cache %s for pr %s", path_to_ccache_dir, current_pr_number)
ccache_name = os.path.basename(path_to_ccache_dir)
compressed_cache_path = os.path.join(temp_path, ccache_name + ".tar.gz")
compressed_cache_path = os.path.join(temp_path, ccache_name + ".tar.zst")
compress_fast(path_to_ccache_dir, compressed_cache_path)
s3_path = (

View File

@@ -5,19 +5,24 @@ import os
def compress_file_fast(path, archive_path):
if os.path.exists("/usr/bin/pigz"):
if archive_path.endswith(".zst"):
subprocess.check_call("zstd < {} > {}".format(path, archive_path), shell=True)
elif os.path.exists("/usr/bin/pigz"):
subprocess.check_call("pigz < {} > {}".format(path, archive_path), shell=True)
else:
subprocess.check_call("gzip < {} > {}".format(path, archive_path), shell=True)
def compress_fast(path, archive_path, exclude=None):
pigz_part = ""
if os.path.exists("/usr/bin/pigz"):
program_part = ""
if archive_path.endswith(".zst"):
logging.info("zstd will be used for compression")
program_part = "--use-compress-program='zstd --threads=0'"
elif os.path.exists("/usr/bin/pigz"):
logging.info("pigz found, will compress and decompress faster")
pigz_part = "--use-compress-program='pigz'"
program_part = "--use-compress-program='pigz'"
else:
pigz_part = "-z"
program_part = "-z"
logging.info("no pigz, compressing with default tar")
if exclude is None:
@@ -32,24 +37,32 @@ def compress_fast(path, archive_path, exclude=None):
path = os.path.dirname(path)
else:
path += "/.."
cmd = "tar {} {} -cf {} -C {} {}".format(
pigz_part, exclude_part, archive_path, path, fname
program_part, exclude_part, archive_path, path, fname
)
logging.debug("compress_fast cmd: %s", cmd)
subprocess.check_call(cmd, shell=True)
def decompress_fast(archive_path, result_path=None):
pigz_part = ""
if os.path.exists("/usr/bin/pigz"):
program_part = ""
if archive_path.endswith(".zst"):
logging.info(
"zstd will be used for decompression ('%s' -> '%s')",
archive_path,
result_path,
)
program_part = "--use-compress-program='zstd --threads=0'"
elif os.path.exists("/usr/bin/pigz"):
logging.info(
"pigz found, will compress and decompress faster ('%s' -> '%s')",
archive_path,
result_path,
)
pigz_part = "--use-compress-program='pigz'"
program_part = "--use-compress-program='pigz'"
else:
pigz_part = "-z"
program_part = "-z"
logging.info(
"no pigz, decompressing with default tar ('%s' -> '%s')",
archive_path,
@@ -58,10 +71,10 @@ def decompress_fast(archive_path, result_path=None):
if result_path is None:
subprocess.check_call(
"tar {} -xf {}".format(pigz_part, archive_path), shell=True
"tar {} -xf {}".format(program_part, archive_path), shell=True
)
else:
subprocess.check_call(
"tar {} -xf {} -C {}".format(pigz_part, archive_path, result_path),
"tar {} -xf {} -C {}".format(program_part, archive_path, result_path),
shell=True,
)
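For an archive path ending in .zst, the rewritten helpers effectively run commands like these (reconstructed from the format strings above; paths are illustrative):

# compress_fast("/ccache", "/tmp/ccache.tar.zst")
tar --use-compress-program='zstd --threads=0' -cf /tmp/ccache.tar.zst -C / ccache

# decompress_fast("/tmp/ccache.tar.zst", "/ccache_dir")
tar --use-compress-program='zstd --threads=0' -xf /tmp/ccache.tar.zst -C /ccache_dir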

View File

@@ -272,9 +272,9 @@ if __name__ == "__main__":
compress_fast(
os.path.join(result_path, "store"),
os.path.join(result_path, "jepsen_store.tar.gz"),
os.path.join(result_path, "jepsen_store.tar.zst"),
)
additional_data.append(os.path.join(result_path, "jepsen_store.tar.gz"))
additional_data.append(os.path.join(result_path, "jepsen_store.tar.zst"))
except Exception as ex:
print("Exception", ex)
status = "failure"

View File

@@ -89,16 +89,16 @@ class S3Helper:
logging.info("No content type provided for %s", file_path)
else:
if re.search(r"\.(txt|log|err|out)$", s3_path) or re.search(
r"\.log\..*(?<!\.gz)$", s3_path
r"\.log\..*(?<!\.zst)$", s3_path
):
logging.info(
"Going to compress file log file %s to %s",
file_path,
file_path + ".gz",
file_path + ".zst",
)
compress_file_fast(file_path, file_path + ".gz")
file_path += ".gz"
s3_path += ".gz"
compress_file_fast(file_path, file_path + ".zst")
file_path += ".zst"
s3_path += ".zst"
else:
logging.info("Processing file without compression")
logging.info("File is too large, do not provide content type")
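The updated pattern compresses plain text, log, and rotated-log files while leaving anything already ending in .zst alone. For example:

clickhouse-server.log        -> uploaded as clickhouse-server.log.zst
clickhouse-server.log.0      -> uploaded as clickhouse-server.log.0.zst
clickhouse-server.log.0.zst  -> uploaded unchanged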

View File

@@ -43,6 +43,8 @@ apt-get install --yes --no-install-recommends \
jq \
lsb-release \
pigz \
ripgrep \
zstd \
python3-dev \
python3-pip \
qemu-user-static \

View File

@@ -82,7 +82,7 @@
[path]
(do
(c/exec :mkdir :-p (str root-folder "/unpacked"))
(c/exec :tar :-zxvf path :-C (str root-folder "/unpacked"))
(c/exec :tar :-xvf path :-C (str root-folder "/unpacked"))
(c/exec :rm :-f path)
(let [subdir (c/exec :ls (str root-folder "/unpacked"))]
(c/exec :mv (str root-folder "/unpacked/" subdir "/usr/bin/clickhouse") root-folder)
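Dropping the z flag here is deliberate: GNU tar (1.31 and later) recognizes the zstd format automatically when reading an archive from a file, so the same invocation handles both the legacy .tgz and the new .tar.zst packages. Roughly (tar version assumed, path illustrative):

tar -xvf clickhouse-common-static.tar.zst -C unpacked   # compression auto-detected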