Commit 4694dad87c by Alexey Milovidov, 2023-01-01 21:17:43 +01:00 (parent fa1d595059)
15 changed files with 67 additions and 53 deletions


@@ -107,7 +107,7 @@ jobs:
run: |
curl --form token="${COVERITY_TOKEN}" \
--form email='security+coverity@clickhouse.com' \
--form file="@$TEMP_PATH/$BUILD_NAME/coverity-scan.tgz" \
--form file="@$TEMP_PATH/$BUILD_NAME/coverity-scan.tar.zst" \
--form version="${GITHUB_REF#refs/heads/}-${GITHUB_SHA::6}" \
--form description="Nightly Scan: $(date +'%Y-%m-%dT%H:%M:%S')" \
https://scan.coverity.com/builds?project=ClickHouse%2FClickHouse
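
The workflow now uploads the zstd archive that the build script produces further down (see the coverity hunk in the packager script below). A quick local sanity check of such an artifact, assuming GNU tar with zstd support, could look like:

    tar --zstd -tvf "$TEMP_PATH/$BUILD_NAME/coverity-scan.tar.zst" | head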


@@ -159,7 +159,7 @@ then
git -C "$PERF_OUTPUT"/ch log -5
(
cd "$PERF_OUTPUT"/..
tar -cv -I pigz -f /output/performance.tgz output
tar -cv --zstd -f /output/performance.tar.zst output
)
fi
@@ -167,15 +167,15 @@ fi
if [ "" != "$COMBINED_OUTPUT" ]
then
prepare_combined_output /output
tar -cv -I pigz -f "$COMBINED_OUTPUT.tgz" /output
tar -cv --zstd -f "$COMBINED_OUTPUT.tar.zst" /output
rm -r /output/*
mv "$COMBINED_OUTPUT.tgz" /output
mv "$COMBINED_OUTPUT.tar.zst" /output
fi
if [ "coverity" == "$COMBINED_OUTPUT" ]
then
tar -cv -I pigz -f "coverity-scan.tgz" cov-int
mv "coverity-scan.tgz" /output
tar -cv --zstd -f "coverity-scan.tar.zst" cov-int
mv "coverity-scan.tar.zst" /output
fi
ccache_status
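
For reference, -I pigz is shorthand for --use-compress-program=pigz, i.e. tar pipes the archive through an external gzip-compatible compressor, while --zstd (GNU tar 1.31+) makes tar invoke the zstd binary itself, so zstd still has to be installed in the build image. A minimal before/after sketch with a hypothetical artifacts name:

    tar -cv -I pigz -f artifacts.tgz output          # before: gzip container, compressed by pigz
    tar -cv --zstd -f artifacts.tar.zst output       # after: zstd container via tar's --zstd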


@@ -188,7 +188,7 @@ function build
cp programs/clickhouse "$FASTTEST_OUTPUT/clickhouse"
strip programs/clickhouse -o "$FASTTEST_OUTPUT/clickhouse-stripped"
gzip "$FASTTEST_OUTPUT/clickhouse-stripped"
zstd --threads=0 "$FASTTEST_OUTPUT/clickhouse-stripped"
fi
ccache --show-stats ||:
ccache --evict-older-than 1d ||:
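
One behavioural difference to keep in mind: gzip replaces its input with the .gz file, whereas zstd keeps the input by default, so both clickhouse-stripped and clickhouse-stripped.zst now end up in the output directory. If only the compressed copy were wanted, --rm would restore the gzip-like delete-after-compress behaviour (illustrative, not part of this change):

    zstd --threads=0 --rm "$FASTTEST_OUTPUT/clickhouse-stripped"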


@@ -325,8 +325,8 @@ quit
fi
if test -f core.*; then
pigz core.*
mv core.*.gz core.gz
zstd --threads=0 core.*
mv core.*.zst core.zst
fi
dmesg -T | rg -q -F -e 'Out of memory: Killed process' -e 'oom_reaper: reaped process' -e 'oom-kill:constraint=CONSTRAINT_NONE' && echo "OOM in dmesg" ||:
@@ -363,13 +363,13 @@ case "$stage" in
"report")
CORE_LINK=''
if [ -f core.gz ]; then
CORE_LINK='<a href="core.gz">core.gz</a>'
if [ -f core.zst ]; then
CORE_LINK='<a href="core.zst">core.zst</a>'
fi
rg --text -F '<Fatal>' server.log > fatal.log ||:
pigz server.log
zstd --threads=0 server.log
cat > report.html <<EOF ||:
<!DOCTYPE html>
@@ -394,7 +394,7 @@ p.links a { padding: 5px; margin: 3px; background: #FFF; line-height: 2; white-s
<p class="links">
<a href="run.log">run.log</a>
<a href="fuzzer.log">fuzzer.log</a>
<a href="server.log.gz">server.log.gz</a>
<a href="server.log.zst">server.log.zst</a>
<a href="main.log">main.log</a>
${CORE_LINK}
</p>


@@ -154,19 +154,19 @@ fi
rg -Fa "Fatal" /var/log/clickhouse-server/clickhouse-server.log ||:
pigz < /var/log/clickhouse-server/clickhouse-server.log > /test_output/clickhouse-server.log.gz ||:
zstd --threads=0 < /var/log/clickhouse-server/clickhouse-server.log > /test_output/clickhouse-server.log.zst ||:
# FIXME: remove once only github actions will be left
rm /var/log/clickhouse-server/clickhouse-server.log
mv /var/log/clickhouse-server/stderr.log /test_output/ ||:
if [[ -n "$WITH_COVERAGE" ]] && [[ "$WITH_COVERAGE" -eq 1 ]]; then
tar -chf /test_output/clickhouse_coverage.tar.gz /profraw ||:
tar --zstd -c -h -f /test_output/clickhouse_coverage.tar.zst /profraw ||:
fi
if [[ -n "$USE_DATABASE_REPLICATED" ]] && [[ "$USE_DATABASE_REPLICATED" -eq 1 ]]; then
rg -Fa "Fatal" /var/log/clickhouse-server/clickhouse-server1.log ||:
rg -Fa "Fatal" /var/log/clickhouse-server/clickhouse-server2.log ||:
pigz < /var/log/clickhouse-server/clickhouse-server1.log > /test_output/clickhouse-server1.log.gz ||:
pigz < /var/log/clickhouse-server/clickhouse-server2.log > /test_output/clickhouse-server2.log.gz ||:
zstd --threads=0 < /var/log/clickhouse-server/clickhouse-server1.log > /test_output/clickhouse-server1.log.zst ||:
zstd --threads=0 < /var/log/clickhouse-server/clickhouse-server2.log > /test_output/clickhouse-server2.log.zst ||:
# FIXME: remove once only github actions will be left
rm /var/log/clickhouse-server/clickhouse-server1.log
rm /var/log/clickhouse-server/clickhouse-server2.log
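
Used as a stream filter this way, zstd reads the log from stdin and writes the compressed stream to stdout, and --threads=0 lets it pick the worker count from the available cores. The resulting archives can be grepped without unpacking, e.g. (illustrative):

    zstdcat /test_output/clickhouse-server.log.zst | rg -Fa "Fatal" | head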


@@ -168,7 +168,7 @@ if [[ -n "$USE_DATABASE_REPLICATED" ]] && [[ "$USE_DATABASE_REPLICATED" -eq 1 ]]
fi
rg -Fa "Fatal" /var/log/clickhouse-server/clickhouse-server.log ||:
pigz < /var/log/clickhouse-server/clickhouse-server.log > /test_output/clickhouse-server.log.gz &
zstd --threads=0 < /var/log/clickhouse-server/clickhouse-server.log > /test_output/clickhouse-server.log.zst &
# Compress tables.
#
@@ -179,10 +179,10 @@ pigz < /var/log/clickhouse-server/clickhouse-server.log > /test_output/clickhous
# for files >64MB, we want these files to be compressed explicitly
for table in query_log zookeeper_log trace_log transactions_info_log
do
clickhouse-local --path /var/lib/clickhouse/ --only-system-tables -q "select * from system.$table format TSVWithNamesAndTypes" | pigz > /test_output/$table.tsv.gz ||:
clickhouse-local --path /var/lib/clickhouse/ --only-system-tables -q "select * from system.$table format TSVWithNamesAndTypes" | zstd --threads=0 > /test_output/$table.tsv.zst ||:
if [[ -n "$USE_DATABASE_REPLICATED" ]] && [[ "$USE_DATABASE_REPLICATED" -eq 1 ]]; then
clickhouse-local --path /var/lib/clickhouse1/ --only-system-tables -q "select * from system.$table format TSVWithNamesAndTypes" | pigz > /test_output/$table.1.tsv.gz ||:
clickhouse-local --path /var/lib/clickhouse2/ --only-system-tables -q "select * from system.$table format TSVWithNamesAndTypes" | pigz > /test_output/$table.2.tsv.gz ||:
clickhouse-local --path /var/lib/clickhouse1/ --only-system-tables -q "select * from system.$table format TSVWithNamesAndTypes" | zstd --threads=0 > /test_output/$table.1.tsv.zst ||:
clickhouse-local --path /var/lib/clickhouse2/ --only-system-tables -q "select * from system.$table format TSVWithNamesAndTypes" | zstd --threads=0 > /test_output/$table.2.tsv.zst ||:
fi
done
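
Each system-table dump is now a zstd-compressed TSV. One way to peek at a dump locally, assuming clickhouse-local's file() auto-detects the compression from the .zst extension (illustrative):

    cd /test_output && clickhouse-local -q "SELECT count() FROM file('query_log.tsv.zst', 'TSVWithNamesAndTypes')"
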
@@ -199,7 +199,7 @@ do
order by samples desc
settings allow_introspection_functions = 1
format TabSeparated" \
| pigz > "/test_output/trace-log-$trace_type-flamegraph.tsv.gz" ||:
| zstd --threads=0 > "/test_output/trace-log-$trace_type-flamegraph.tsv.zst" ||:
done
@@ -207,7 +207,7 @@ done
rm /var/log/clickhouse-server/clickhouse-server.log
mv /var/log/clickhouse-server/stderr.log /test_output/ ||:
if [[ -n "$WITH_COVERAGE" ]] && [[ "$WITH_COVERAGE" -eq 1 ]]; then
tar -chf /test_output/clickhouse_coverage.tar.gz /profraw ||:
tar --zstd -chf /test_output/clickhouse_coverage.tar.zst /profraw ||:
fi
tar -chf /test_output/coordination.tar /var/lib/clickhouse/coordination ||:
@@ -215,8 +215,8 @@ tar -chf /test_output/coordination.tar /var/lib/clickhouse/coordination ||:
if [[ -n "$USE_DATABASE_REPLICATED" ]] && [[ "$USE_DATABASE_REPLICATED" -eq 1 ]]; then
rg -Fa "Fatal" /var/log/clickhouse-server/clickhouse-server1.log ||:
rg -Fa "Fatal" /var/log/clickhouse-server/clickhouse-server2.log ||:
pigz < /var/log/clickhouse-server/clickhouse-server1.log > /test_output/clickhouse-server1.log.gz ||:
pigz < /var/log/clickhouse-server/clickhouse-server2.log > /test_output/clickhouse-server2.log.gz ||:
zstd --threads=0 < /var/log/clickhouse-server/clickhouse-server1.log > /test_output/clickhouse-server1.log.zst ||:
zstd --threads=0 < /var/log/clickhouse-server/clickhouse-server2.log > /test_output/clickhouse-server2.log.zst ||:
# FIXME: remove once only github actions will be left
rm /var/log/clickhouse-server/clickhouse-server1.log
rm /var/log/clickhouse-server/clickhouse-server2.log


@@ -22,7 +22,7 @@ Still alive
2018-10-22 13:49:16,195 Stress is ok
2018-10-22 13:49:16,195 Copying server log files
$ ls $HOME/test_result
clickhouse-server.err.log clickhouse-server.log.0.gz stderr.log stress_test_run_0.txt stress_test_run_11.txt stress_test_run_13.txt
clickhouse-server.err.log clickhouse-server.log.0.zst stderr.log stress_test_run_0.txt stress_test_run_11.txt stress_test_run_13.txt
stress_test_run_15.txt stress_test_run_2.txt stress_test_run_4.txt stress_test_run_6.txt stress_test_run_8.txt clickhouse-server.log
perf_stress_run.txt stdout.log stress_test_run_10.txt stress_test_run_12.txt
stress_test_run_14.txt stress_test_run_1.txt


@@ -378,7 +378,7 @@ if [ "$DISABLE_BC_CHECK" -ne "1" ]; then
mv /var/log/clickhouse-server/clickhouse-server.log /var/log/clickhouse-server/clickhouse-server.clean.log
for table in query_log trace_log
do
clickhouse-local --path /var/lib/clickhouse/ --only-system-tables -q "select * from system.$table format TSVWithNamesAndTypes" | pigz > /test_output/$table.tsv.gz ||:
clickhouse-local --path /var/lib/clickhouse/ --only-system-tables -q "select * from system.$table format TSVWithNamesAndTypes" | zstd --threads=0 > /test_output/$table.tsv.zst ||:
done
tar -chf /test_output/coordination.tar /var/lib/clickhouse/coordination ||:
@@ -578,7 +578,7 @@ if [ "$DISABLE_BC_CHECK" -ne "1" ]; then
tar -chf /test_output/coordination.backward.tar /var/lib/clickhouse/coordination ||:
for table in query_log trace_log
do
clickhouse-local --path /var/lib/clickhouse/ --only-system-tables -q "select * from system.$table format TSVWithNamesAndTypes" | pigz > /test_output/$table.backward.tsv.gz ||:
clickhouse-local --path /var/lib/clickhouse/ --only-system-tables -q "select * from system.$table format TSVWithNamesAndTypes" | zstd --threads=0 > /test_output/$table.backward.tsv.zst ||:
done
fi
fi
@@ -598,6 +598,6 @@ clickhouse-local --structure "test String, res String" -q "SELECT 'failure', tes
# Core dumps
for core in core.*; do
pigz $core
mv $core.gz /test_output/
zstd --threads=0 $core
mv $core.zst /test_output/
done


@@ -89,7 +89,7 @@ def run_func_test(
def compress_stress_logs(output_path, files_prefix):
cmd = f"cd {output_path} && tar -zcf stress_run_logs.tar.gz {files_prefix}* && rm {files_prefix}*"
cmd = f"cd {output_path} && tar --zstd -zcf stress_run_logs.tar.zst {files_prefix}* && rm {files_prefix}*"
check_output(cmd, shell=True)


@@ -115,10 +115,10 @@ if __name__ == "__main__":
paths = {
"run.log": run_log_path,
"main.log": os.path.join(workspace_path, "main.log"),
"server.log.gz": os.path.join(workspace_path, "server.log.gz"),
"server.log.zst": os.path.join(workspace_path, "server.log.zst"),
"fuzzer.log": os.path.join(workspace_path, "fuzzer.log"),
"report.html": os.path.join(workspace_path, "report.html"),
"core.gz": os.path.join(workspace_path, "core.gz"),
"core.zst": os.path.join(workspace_path, "core.zst"),
}
s3_helper = S3Helper()


@@ -116,7 +116,7 @@ def get_ccache_if_not_exists(
def upload_ccache(path_to_ccache_dir, s3_helper, current_pr_number, temp_path):
logging.info("Uploading cache %s for pr %s", path_to_ccache_dir, current_pr_number)
ccache_name = os.path.basename(path_to_ccache_dir)
compressed_cache_path = os.path.join(temp_path, ccache_name + ".tar.gz")
compressed_cache_path = os.path.join(temp_path, ccache_name + ".tar.zst")
compress_fast(path_to_ccache_dir, compressed_cache_path)
s3_path = (


@@ -5,19 +5,24 @@ import os
def compress_file_fast(path, archive_path):
if os.path.exists("/usr/bin/pigz"):
if archive_path.endswith(".zst"):
subprocess.check_call("zstd < {} > {}".format(path, archive_path), shell=True)
elif os.path.exists("/usr/bin/pigz"):
subprocess.check_call("pigz < {} > {}".format(path, archive_path), shell=True)
else:
subprocess.check_call("gzip < {} > {}".format(path, archive_path), shell=True)
def compress_fast(path, archive_path, exclude=None):
pigz_part = ""
if os.path.exists("/usr/bin/pigz"):
program_part = ""
if archive_path.endswith(".zst"):
logging.info("zstd will be used for compression")
program_part = "--use-compress-program='zstd --threads=0'"
elif os.path.exists("/usr/bin/pigz"):
logging.info("pigz found, will compress and decompress faster")
pigz_part = "--use-compress-program='pigz'"
program_part = "--use-compress-program='pigz'"
else:
pigz_part = "-z"
program_part = "-z"
logging.info("no pigz, compressing with default tar")
if exclude is None:
@@ -32,24 +32,31 @@ def compress_fast(path, archive_path, exclude=None):
path = os.path.dirname(path)
else:
path += "/.."
cmd = "tar {} {} -cf {} -C {} {}".format(
pigz_part, exclude_part, archive_path, path, fname
program_part, exclude_part, archive_path, path, fname
)
logging.debug("compress_fast cmd: %s", cmd)
subprocess.check_call(cmd, shell=True)
def decompress_fast(archive_path, result_path=None):
pigz_part = ""
if os.path.exists("/usr/bin/pigz"):
program_part = ""
if archive_path.endswith(".zst"):
logging.info("zstd will be used for decompression ('%s' -> '%s')",
archive_path,
result_path,
)
program_part = "--use-compress-program='zstd --threads=0'"
elif os.path.exists("/usr/bin/pigz"):
logging.info(
"pigz found, will compress and decompress faster ('%s' -> '%s')",
archive_path,
result_path,
)
pigz_part = "--use-compress-program='pigz'"
program_part = "--use-compress-program='pigz'"
else:
pigz_part = "-z"
program_part = "-z"
logging.info(
"no pigz, decompressing with default tar ('%s' -> '%s')",
archive_path,
@@ -58,10 +70,10 @@ def decompress_fast(archive_path, result_path=None):
if result_path is None:
subprocess.check_call(
"tar {} -xf {}".format(pigz_part, archive_path), shell=True
"tar {} -xf {}".format(program_part, archive_path), shell=True
)
else:
subprocess.check_call(
"tar {} -xf {} -C {}".format(pigz_part, archive_path, result_path),
"tar {} -xf {} -C {}".format(program_part, archive_path, result_path),
shell=True,
)
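
With a .zst target the helpers now assemble tar invocations of roughly this shape (paths purely illustrative):

    tar --use-compress-program='zstd --threads=0' -cf /tmp/ccache.tar.zst -C /home/runner ccache
    tar --use-compress-program='zstd --threads=0' -xf /tmp/ccache.tar.zst -C /tmp/ccache_out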


@@ -272,9 +272,9 @@ if __name__ == "__main__":
compress_fast(
os.path.join(result_path, "store"),
os.path.join(result_path, "jepsen_store.tar.gz"),
os.path.join(result_path, "jepsen_store.tar.zst"),
)
additional_data.append(os.path.join(result_path, "jepsen_store.tar.gz"))
additional_data.append(os.path.join(result_path, "jepsen_store.tar.zst"))
except Exception as ex:
print("Exception", ex)
status = "failure"


@@ -89,16 +89,16 @@ class S3Helper:
logging.info("No content type provied for %s", file_path)
else:
if re.search(r"\.(txt|log|err|out)$", s3_path) or re.search(
r"\.log\..*(?<!\.gz)$", s3_path
r"\.log\..*(?<!\.zst)$", s3_path
):
logging.info(
"Going to compress file log file %s to %s",
file_path,
file_path + ".gz",
file_path + ".zst",
)
compress_file_fast(file_path, file_path + ".gz")
file_path += ".gz"
s3_path += ".gz"
compress_file_fast(file_path, file_path + ".zst")
file_path += ".zst"
s3_path += ".zst"
else:
logging.info("Processing file without compression")
logging.info("File is too large, do not provide content type")


@@ -43,6 +43,8 @@ apt-get install --yes --no-install-recommends \
jq \
lsb-release \
pigz \
ripgrep \
zstd \
python3-dev \
python3-pip \
qemu-user-static \