From 3142921bb4dcb8b7169f7d32a05110c9a5baa351 Mon Sep 17 00:00:00 2001 From: Alexey Milovidov Date: Sun, 29 Oct 2023 11:15:11 +0100 Subject: [PATCH 01/99] Taming query profiler --- src/Common/ProfileEvents.cpp | 1 + src/Common/QueryProfiler.cpp | 12 ++++++++++++ 2 files changed, 13 insertions(+) diff --git a/src/Common/ProfileEvents.cpp b/src/Common/ProfileEvents.cpp index f4d7242f70a..aadbc8b2471 100644 --- a/src/Common/ProfileEvents.cpp +++ b/src/Common/ProfileEvents.cpp @@ -316,6 +316,7 @@ The server successfully detected this situation and will download merged part fr \ M(CannotWriteToWriteBufferDiscard, "Number of stack traces dropped by query profiler or signal handler because pipe is full or cannot write to pipe.") \ M(QueryProfilerSignalOverruns, "Number of times we drop processing of a query profiler signal due to overrun plus the number of signals that OS has not delivered due to overrun.") \ + M(QueryProfilerConcurrencyOverruns, "Number of times we drop processing of a query profiler signal due to too many concurrent query profilers in other threads, which may indicate overload.") \ M(QueryProfilerRuns, "Number of times QueryProfiler had been run.") \ \ M(CreatedLogEntryForMerge, "Successfully created log entry to merge parts in ReplicatedMergeTree.") \ diff --git a/src/Common/QueryProfiler.cpp b/src/Common/QueryProfiler.cpp index dc9f3610513..c656e7f992f 100644 --- a/src/Common/QueryProfiler.cpp +++ b/src/Common/QueryProfiler.cpp @@ -22,6 +22,7 @@ namespace CurrentMetrics namespace ProfileEvents { extern const Event QueryProfilerSignalOverruns; + extern const Event QueryProfilerConcurrencyOverruns; extern const Event QueryProfilerRuns; } @@ -40,8 +41,19 @@ namespace /// to ignore delivered signals after timer_delete(). thread_local bool signal_handler_disarmed = true; + /// Don't permit too many threads be busy inside profiler, + /// which could slow down the system in some environments. 
+ std::atomic concurrent_invocations = 0; + void writeTraceInfo(TraceType trace_type, int /* sig */, siginfo_t * info, void * context) { + SCOPE_EXIT({ concurrent_invocations.fetch_sub(1, std::memory_order_relaxed); }); + if (concurrent_invocations.fetch_add(1, std::memory_order_relaxed) > 100) + { + ProfileEvents::incrementNoTrace(ProfileEvents::QueryProfilerConcurrencyOverruns); + return; + } + auto saved_errno = errno; /// We must restore previous value of errno in signal handler. #if defined(OS_LINUX) From e15815ee974dafada9ec0de9996d2d29eb26e6cb Mon Sep 17 00:00:00 2001 From: Alexey Milovidov Date: Sun, 29 Oct 2023 11:32:45 +0100 Subject: [PATCH 02/99] Add a test --- ...ry_profiler_concurrency_overruns.reference | 1 + ...907_query_profiler_concurrency_overruns.sh | 20 +++++++++++++++++++ 2 files changed, 21 insertions(+) create mode 100644 tests/queries/0_stateless/02907_query_profiler_concurrency_overruns.reference create mode 100755 tests/queries/0_stateless/02907_query_profiler_concurrency_overruns.sh diff --git a/tests/queries/0_stateless/02907_query_profiler_concurrency_overruns.reference b/tests/queries/0_stateless/02907_query_profiler_concurrency_overruns.reference new file mode 100644 index 00000000000..8f75b7cccf2 --- /dev/null +++ b/tests/queries/0_stateless/02907_query_profiler_concurrency_overruns.reference @@ -0,0 +1 @@ +1000000000 1 1 diff --git a/tests/queries/0_stateless/02907_query_profiler_concurrency_overruns.sh b/tests/queries/0_stateless/02907_query_profiler_concurrency_overruns.sh new file mode 100755 index 00000000000..f3f37704e23 --- /dev/null +++ b/tests/queries/0_stateless/02907_query_profiler_concurrency_overruns.sh @@ -0,0 +1,20 @@ +#!/usr/bin/env bash +# Tags: no-tsan, no-asan, no-ubsan, no-msan, no-debug, no-fasttest, no-cpu-aarch64 + +CUR_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd) +# shellcheck source=../shell_config.sh +. 
"$CUR_DIR"/../shell_config.sh + +# The check is probablistic, so make sure that it passes at least sometimes: + +while true +do + ${CLICKHOUSE_CLIENT} -n --query=" + SELECT count() FROM numbers_mt(1000000000) SETTINGS + query_profiler_real_time_period_ns = 1000000, + query_profiler_cpu_time_period_ns = 1000000, + max_threads = 1000; + SELECT anyIf(value, event = 'QueryProfilerRuns') > 0, anyIf(value, event = 'QueryProfilerConcurrencyOverruns') > 0 FROM system.events; + " | tr '\t\n' ' ' | grep '1000000000 1 1' && break + sleep 1 +done From f85e9138da6990fa95ec1c757cdf6207e6040ddc Mon Sep 17 00:00:00 2001 From: Alexey Milovidov Date: Mon, 30 Oct 2023 02:31:54 +0300 Subject: [PATCH 03/99] Update 02907_query_profiler_concurrency_overruns.sh --- .../0_stateless/02907_query_profiler_concurrency_overruns.sh | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/queries/0_stateless/02907_query_profiler_concurrency_overruns.sh b/tests/queries/0_stateless/02907_query_profiler_concurrency_overruns.sh index f3f37704e23..c43889d78b2 100755 --- a/tests/queries/0_stateless/02907_query_profiler_concurrency_overruns.sh +++ b/tests/queries/0_stateless/02907_query_profiler_concurrency_overruns.sh @@ -10,10 +10,11 @@ CUR_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd) while true do ${CLICKHOUSE_CLIENT} -n --query=" - SELECT count() FROM numbers_mt(1000000000) SETTINGS + SELECT count() FROM numbers_mt(1000000) SETTINGS query_profiler_real_time_period_ns = 1000000, query_profiler_cpu_time_period_ns = 1000000, - max_threads = 1000; + max_threads = 1000, + max_block_size = 100; SELECT anyIf(value, event = 'QueryProfilerRuns') > 0, anyIf(value, event = 'QueryProfilerConcurrencyOverruns') > 0 FROM system.events; " | tr '\t\n' ' ' | grep '1000000000 1 1' && break sleep 1 From c544a0221010d812c68fb805aee3eed1a252b50c Mon Sep 17 00:00:00 2001 From: Alexey Milovidov Date: Mon, 30 Oct 2023 02:32:22 +0300 Subject: [PATCH 04/99] Update 
02907_query_profiler_concurrency_overruns.sh --- .../0_stateless/02907_query_profiler_concurrency_overruns.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/queries/0_stateless/02907_query_profiler_concurrency_overruns.sh b/tests/queries/0_stateless/02907_query_profiler_concurrency_overruns.sh index c43889d78b2..7c5e4209124 100755 --- a/tests/queries/0_stateless/02907_query_profiler_concurrency_overruns.sh +++ b/tests/queries/0_stateless/02907_query_profiler_concurrency_overruns.sh @@ -10,12 +10,12 @@ CUR_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd) while true do ${CLICKHOUSE_CLIENT} -n --query=" - SELECT count() FROM numbers_mt(1000000) SETTINGS + SELECT count() FROM zeros_mt(1000000) SETTINGS query_profiler_real_time_period_ns = 1000000, query_profiler_cpu_time_period_ns = 1000000, max_threads = 1000, max_block_size = 100; SELECT anyIf(value, event = 'QueryProfilerRuns') > 0, anyIf(value, event = 'QueryProfilerConcurrencyOverruns') > 0 FROM system.events; - " | tr '\t\n' ' ' | grep '1000000000 1 1' && break + " | tr '\t\n' ' ' | grep '1000000 1 1' && break sleep 1 done From e6644c17736a07ad32dcf0a848dec05a94a3505a Mon Sep 17 00:00:00 2001 From: Alexey Milovidov Date: Mon, 30 Oct 2023 02:32:32 +0300 Subject: [PATCH 05/99] Update 02907_query_profiler_concurrency_overruns.reference --- .../02907_query_profiler_concurrency_overruns.reference | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/queries/0_stateless/02907_query_profiler_concurrency_overruns.reference b/tests/queries/0_stateless/02907_query_profiler_concurrency_overruns.reference index 8f75b7cccf2..45d53fbec54 100644 --- a/tests/queries/0_stateless/02907_query_profiler_concurrency_overruns.reference +++ b/tests/queries/0_stateless/02907_query_profiler_concurrency_overruns.reference @@ -1 +1 @@ -1000000000 1 1 +1000000 1 1 From 136f9841540eee70917815728765f51fe916fb11 Mon Sep 17 00:00:00 2001 From: avogar Date: Tue, 19 Sep 2023 15:49:25 +0000 Subject: 
[PATCH 06/99] Update arrow to release-13.0.0 --- contrib/arrow | 2 +- contrib/arrow-cmake/CMakeLists.txt | 51 ++++++++---------- .../02735_parquet_encoder.reference | 8 +-- .../02884_parquet_new_encodings.reference | 1 + .../02884_parquet_new_encodings.sh | 9 ++++ .../delta_lenght_byte_array_encoding.parquet | Bin 0 -> 2795 bytes 6 files changed, 37 insertions(+), 34 deletions(-) create mode 100644 tests/queries/0_stateless/02884_parquet_new_encodings.reference create mode 100755 tests/queries/0_stateless/02884_parquet_new_encodings.sh create mode 100644 tests/queries/0_stateless/data_parquet/delta_lenght_byte_array_encoding.parquet diff --git a/contrib/arrow b/contrib/arrow index 1d93838f69a..9d9c464ce68 160000 --- a/contrib/arrow +++ b/contrib/arrow @@ -1 +1 @@ -Subproject commit 1d93838f69a802639ca144ea5704a98e2481810d +Subproject commit 9d9c464ce6883f52aaca9f913eec4cd50006c767 diff --git a/contrib/arrow-cmake/CMakeLists.txt b/contrib/arrow-cmake/CMakeLists.txt index 02e809c560f..c45d75bb3f2 100644 --- a/contrib/arrow-cmake/CMakeLists.txt +++ b/contrib/arrow-cmake/CMakeLists.txt @@ -230,6 +230,8 @@ set(ARROW_SRCS "${LIBRARY_DIR}/array/builder_nested.cc" "${LIBRARY_DIR}/array/builder_primitive.cc" "${LIBRARY_DIR}/array/builder_union.cc" + "${LIBRARY_DIR}/array/builder_run_end.cc" + "${LIBRARY_DIR}/array/array_run_end.cc" "${LIBRARY_DIR}/array/concatenate.cc" "${LIBRARY_DIR}/array/data.cc" "${LIBRARY_DIR}/array/diff.cc" @@ -309,9 +311,12 @@ set(ARROW_SRCS "${LIBRARY_DIR}/util/debug.cc" "${LIBRARY_DIR}/util/tracing.cc" "${LIBRARY_DIR}/util/atfork_internal.cc" + "${LIBRARY_DIR}/util/crc32.cc" + "${LIBRARY_DIR}/util/hashing.cc" + "${LIBRARY_DIR}/util/ree_util.cc" + "${LIBRARY_DIR}/util/union_util.cc" "${LIBRARY_DIR}/vendored/base64.cpp" "${LIBRARY_DIR}/vendored/datetime/tz.cpp" - "${LIBRARY_DIR}/vendored/musl/strptime.c" "${LIBRARY_DIR}/vendored/uriparser/UriCommon.c" "${LIBRARY_DIR}/vendored/uriparser/UriCompare.c" @@ -328,39 +333,20 @@ set(ARROW_SRCS 
"${LIBRARY_DIR}/vendored/uriparser/UriRecompose.c" "${LIBRARY_DIR}/vendored/uriparser/UriResolve.c" "${LIBRARY_DIR}/vendored/uriparser/UriShorten.c" + "${LIBRARY_DIR}/vendored/double-conversion/bignum.cc" + "${LIBRARY_DIR}/vendored/double-conversion/bignum-dtoa.cc" + "${LIBRARY_DIR}/vendored/double-conversion/cached-powers.cc" + "${LIBRARY_DIR}/vendored/double-conversion/double-to-string.cc" + "${LIBRARY_DIR}/vendored/double-conversion/fast-dtoa.cc" + "${LIBRARY_DIR}/vendored/double-conversion/fixed-dtoa.cc" + "${LIBRARY_DIR}/vendored/double-conversion/string-to-double.cc" + "${LIBRARY_DIR}/vendored/double-conversion/strtod.cc" "${LIBRARY_DIR}/compute/api_aggregate.cc" "${LIBRARY_DIR}/compute/api_scalar.cc" "${LIBRARY_DIR}/compute/api_vector.cc" "${LIBRARY_DIR}/compute/cast.cc" "${LIBRARY_DIR}/compute/exec.cc" - "${LIBRARY_DIR}/compute/exec/accumulation_queue.cc" - "${LIBRARY_DIR}/compute/exec/accumulation_queue.h" - "${LIBRARY_DIR}/compute/exec/aggregate.cc" - "${LIBRARY_DIR}/compute/exec/aggregate_node.cc" - "${LIBRARY_DIR}/compute/exec/asof_join_node.cc" - "${LIBRARY_DIR}/compute/exec/bloom_filter.cc" - "${LIBRARY_DIR}/compute/exec/exec_plan.cc" - "${LIBRARY_DIR}/compute/exec/expression.cc" - "${LIBRARY_DIR}/compute/exec/filter_node.cc" - "${LIBRARY_DIR}/compute/exec/hash_join.cc" - "${LIBRARY_DIR}/compute/exec/hash_join_dict.cc" - "${LIBRARY_DIR}/compute/exec/hash_join_node.cc" - "${LIBRARY_DIR}/compute/exec/key_hash.cc" - "${LIBRARY_DIR}/compute/exec/key_map.cc" - "${LIBRARY_DIR}/compute/exec/map_node.cc" - "${LIBRARY_DIR}/compute/exec/options.cc" - "${LIBRARY_DIR}/compute/exec/order_by_impl.cc" - "${LIBRARY_DIR}/compute/exec/partition_util.cc" - "${LIBRARY_DIR}/compute/exec/project_node.cc" - "${LIBRARY_DIR}/compute/exec/query_context.cc" - "${LIBRARY_DIR}/compute/exec/sink_node.cc" - "${LIBRARY_DIR}/compute/exec/source_node.cc" - "${LIBRARY_DIR}/compute/exec/swiss_join.cc" - "${LIBRARY_DIR}/compute/exec/task_util.cc" - 
"${LIBRARY_DIR}/compute/exec/tpch_node.cc" - "${LIBRARY_DIR}/compute/exec/union_node.cc" - "${LIBRARY_DIR}/compute/exec/util.cc" "${LIBRARY_DIR}/compute/function.cc" "${LIBRARY_DIR}/compute/function_internal.cc" "${LIBRARY_DIR}/compute/kernel.cc" @@ -403,8 +389,13 @@ set(ARROW_SRCS "${LIBRARY_DIR}/compute/kernels/vector_select_k.cc" "${LIBRARY_DIR}/compute/kernels/vector_selection.cc" "${LIBRARY_DIR}/compute/kernels/vector_sort.cc" + "${LIBRARY_DIR}/compute/kernels/vector_selection_internal.cc" + "${LIBRARY_DIR}/compute/kernels/vector_selection_filter_internal.cc" + "${LIBRARY_DIR}/compute/kernels/vector_selection_take_internal.cc" "${LIBRARY_DIR}/compute/light_array.cc" "${LIBRARY_DIR}/compute/registry.cc" + "${LIBRARY_DIR}/compute/expression.cc" + "${LIBRARY_DIR}/compute/ordering.cc" "${LIBRARY_DIR}/compute/row/compare_internal.cc" "${LIBRARY_DIR}/compute/row/encode_internal.cc" "${LIBRARY_DIR}/compute/row/grouper.cc" @@ -488,10 +479,10 @@ set(PARQUET_SRCS "${LIBRARY_DIR}/exception.cc" "${LIBRARY_DIR}/file_reader.cc" "${LIBRARY_DIR}/file_writer.cc" + "${LIBRARY_DIR}/page_index.cc" "${LIBRARY_DIR}/level_conversion.cc" "${LIBRARY_DIR}/level_comparison.cc" "${LIBRARY_DIR}/metadata.cc" - "${LIBRARY_DIR}/murmur3.cc" "${LIBRARY_DIR}/platform.cc" "${LIBRARY_DIR}/printer.cc" "${LIBRARY_DIR}/properties.cc" @@ -500,6 +491,8 @@ set(PARQUET_SRCS "${LIBRARY_DIR}/stream_reader.cc" "${LIBRARY_DIR}/stream_writer.cc" "${LIBRARY_DIR}/types.cc" + "${LIBRARY_DIR}/bloom_filter_reader.cc" + "${LIBRARY_DIR}/xxhasher.cc" "${GEN_LIBRARY_DIR}/parquet_constants.cpp" "${GEN_LIBRARY_DIR}/parquet_types.cpp" diff --git a/tests/queries/0_stateless/02735_parquet_encoder.reference b/tests/queries/0_stateless/02735_parquet_encoder.reference index a7ee82bc67f..143fde3093f 100644 --- a/tests/queries/0_stateless/02735_parquet_encoder.reference +++ b/tests/queries/0_stateless/02735_parquet_encoder.reference @@ -36,11 +36,11 @@ ipv6 Nullable(FixedString(16)) 1 1000000 1 3914219105369203805 4 1000000 1 
-(1000000,0,NULL,'100','299') -(1000000,0,NULL,'0','-1294970296') -(1000000,0,NULL,'-2147483296','2147481000') +(1000000,NULL,NULL,'100','299') +(1000000,NULL,NULL,'0','-1294970296') +(1000000,NULL,NULL,'-2147483296','2147481000') (100000,900000,NULL,'100009','999999') -[(2,0,NULL,'','[]')] +[(2,NULL,NULL,'','[]')] 1 1 0 1 5090915589685802007 diff --git a/tests/queries/0_stateless/02884_parquet_new_encodings.reference b/tests/queries/0_stateless/02884_parquet_new_encodings.reference new file mode 100644 index 00000000000..1034f208e18 --- /dev/null +++ b/tests/queries/0_stateless/02884_parquet_new_encodings.reference @@ -0,0 +1 @@ +SWEEP SWETT 00459 \N ('20221206100111','+0100') ('20221206100111','+0100') ('20221206100111','+0100') 3 11 T \N diff --git a/tests/queries/0_stateless/02884_parquet_new_encodings.sh b/tests/queries/0_stateless/02884_parquet_new_encodings.sh new file mode 100755 index 00000000000..01114c2e4f4 --- /dev/null +++ b/tests/queries/0_stateless/02884_parquet_new_encodings.sh @@ -0,0 +1,9 @@ +#!/usr/bin/env bash +# Tags: no-fasttest + +CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd) +# shellcheck source=../shell_config.sh +. 
"$CURDIR"/../shell_config.sh + +$CLICKHOUSE_LOCAL -q "select * from file('$CURDIR/data_parquet/delta_lenght_byte_array_encoding.parquet')" + diff --git a/tests/queries/0_stateless/data_parquet/delta_lenght_byte_array_encoding.parquet b/tests/queries/0_stateless/data_parquet/delta_lenght_byte_array_encoding.parquet new file mode 100644 index 0000000000000000000000000000000000000000..cf785d97dc7032fdec8e51c8d3749c685ffc7a4d GIT binary patch literal 2795 zcmdUxO=ufO6vt;*@>-EMvg3G$9V!wiu?Q_qB+(RlDJeMM7EwfElH8V+GIAFrt+HAL zdNYR7QcOwO9C|V-h2o~A1w${zgnW>Da6<@%LUL*_t`8w8K9tZmv%BM2$Lr=0DAme% znD^dq{`)?5Re$$n1sm9c>*>W`zcy=F#|ZvRoW*DyAzho(vReMsnG+}Kk>4K+Pu7fd zznPiH@8PBUH}WZdbr1`N=nhm?bkW3a;}a{!4M_IHJ~&=#2P3MwB2V4Kp*L0a`*ah}wf8 z?19LjQ445@`{^N|kgP1Vwr!|LQBy#MwcR>vTgZRL$i?-cmRu_Ob8%t794{5K25Hn} z3X_>Ck{qr>&Jit=vvReh$>pYory~1`jvLW(F(Vx|GIIA$R_e~(X_A9#V=a+^-zdRs)Z$w$lKWFAe-k;5*%Zi}`kQ5ZG|+OG3kwVot-lRqXd5 z*Ktkk3(W6BZQ|q}s{OG4_i$esAC&73|OvO@a~8NuAV7x%Z0jKr*q85P4R>6XdTPt|BF1 ztA%9DStaI+QHWXElZ@H1!DRT7ro?|ciV68`TYPH&3C0weDs8Ha70y`5;{%HB!>JAW zTw@AogEUGjx*}|qL$BuX=@)`-Z)kb5iy4Lah%@I#63IH8-tp;5H&p5~PF)^JDov-R zO1+a+q+T8a=%;2<>TWZXF80ch!oAA5znDp}yKz(H&KnB$I>%nz8_CVmNo3I7M^aR% zMrx(ikORNXUTGkSsmoBIOIVA27+wl;1OVw94rtut%I{}@Zq3wr0I2z_StL) f-VCmHwCDKnE(qS3_2@dhJ6`aM77T>y@H6)xIfFfV literal 0 HcmV?d00001 From bf9567aac4506102232f9cd1200e8ebae11d2c8a Mon Sep 17 00:00:00 2001 From: avogar Date: Mon, 25 Sep 2023 18:26:29 +0000 Subject: [PATCH 07/99] Fix typo in filename --- .../0_stateless/02884_parquet_new_encodings.sh | 2 +- ...uet => delta_length_byte_array_encoding.parquet} | Bin 2 files changed, 1 insertion(+), 1 deletion(-) rename tests/queries/0_stateless/data_parquet/{delta_lenght_byte_array_encoding.parquet => delta_length_byte_array_encoding.parquet} (100%) diff --git a/tests/queries/0_stateless/02884_parquet_new_encodings.sh b/tests/queries/0_stateless/02884_parquet_new_encodings.sh index 01114c2e4f4..496ed126e23 100755 --- a/tests/queries/0_stateless/02884_parquet_new_encodings.sh 
+++ b/tests/queries/0_stateless/02884_parquet_new_encodings.sh @@ -5,5 +5,5 @@ CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd) # shellcheck source=../shell_config.sh . "$CURDIR"/../shell_config.sh -$CLICKHOUSE_LOCAL -q "select * from file('$CURDIR/data_parquet/delta_lenght_byte_array_encoding.parquet')" +$CLICKHOUSE_LOCAL -q "select * from file('$CURDIR/data_parquet/delta_length_byte_array_encoding.parquet')" diff --git a/tests/queries/0_stateless/data_parquet/delta_lenght_byte_array_encoding.parquet b/tests/queries/0_stateless/data_parquet/delta_length_byte_array_encoding.parquet similarity index 100% rename from tests/queries/0_stateless/data_parquet/delta_lenght_byte_array_encoding.parquet rename to tests/queries/0_stateless/data_parquet/delta_length_byte_array_encoding.parquet From ad67b6c2ea8d9733d94f71b9ba9adcd2dfdf7f15 Mon Sep 17 00:00:00 2001 From: taiyang-li <654010905@qq.com> Date: Mon, 30 Oct 2023 19:33:06 +0800 Subject: [PATCH 08/99] allow tuple field pruning --- .../Impl/NativeORCBlockInputFormat.cpp | 150 +++++++++++++++++- 1 file changed, 143 insertions(+), 7 deletions(-) diff --git a/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp b/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp index b346ef3d232..0af4428b5f0 100644 --- a/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp +++ b/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp @@ -679,6 +679,47 @@ buildORCSearchArgument(const KeyCondition & key_condition, const Block & header, } +static std::string toDotColumnPath(const std::vector & columns) +{ + if (columns.empty()) + return {}; + + std::ostringstream column_stream; // STYLE_CHECK_ALLOW_STD_STRING_STREAM + std::copy(columns.begin(), columns.end(), std::ostream_iterator(column_stream, ".")); + std::string column_path = column_stream.str(); + return column_path.substr(0, column_path.length() - 1); +} + +static void buildORCTypeNameIdMap( + const orc::Type * orc_type, + std::vector & columns, + bool 
case_insensitive_column_matching, + std::map & id_type_map, + std::map & name_id_map) +{ + id_type_map[orc_type->getColumnId()] = orc_type; + if (orc::STRUCT == orc_type->getKind()) + { + for (size_t i = 0; i < orc_type->getSubtypeCount(); ++i) + { + const std::string & field_name = orc_type->getFieldName(i); + columns.push_back(field_name); + auto column_path = toDotColumnPath(columns); + if (case_insensitive_column_matching) + boost::to_lower(column_path); + name_id_map[column_path] = orc_type->getSubtype(i)->getColumnId(); + buildORCTypeNameIdMap(orc_type->getSubtype(i), columns, case_insensitive_column_matching, id_type_map, name_id_map); + columns.pop_back(); + } + } + else + { + // other non-primitive type + for (size_t j = 0; j < orc_type->getSubtypeCount(); ++j) + buildORCTypeNameIdMap(orc_type->getSubtype(j), columns, case_insensitive_column_matching, id_type_map, name_id_map); + } +} + static void getFileReaderAndSchema( ReadBuffer & in, std::unique_ptr & file_reader, @@ -706,6 +747,76 @@ static void getFileReaderAndSchema( } } +static void updateIncludeIndices( + DataTypePtr type, const orc::Type * orc_type, bool case_insensitive_column_matching, std::unordered_set & column_indices) +{ + /// Primitive types + if (orc_type->getSubtypeCount() == 0) + { + column_indices.insert(orc_type->getColumnId()); + return; + } + + auto non_nullable_type = removeNullable(type); + switch (orc_type->getKind()) + { + case orc::LIST: { + const auto * array_type = typeid_cast(non_nullable_type.get()); + if (array_type) + { + updateIncludeIndices( + array_type->getNestedType(), orc_type->getSubtype(0), case_insensitive_column_matching, column_indices); + } + return; + } + case orc::MAP: { + const auto * map_type = typeid_cast(non_nullable_type.get()); + if (map_type) + { + updateIncludeIndices(map_type->getKeyType(), orc_type->getSubtype(0), case_insensitive_column_matching, column_indices); + updateIncludeIndices(map_type->getValueType(), orc_type->getSubtype(1), 
case_insensitive_column_matching, column_indices); + } + return; + } + case orc::STRUCT: { + const auto * tuple_type = typeid_cast(non_nullable_type.get()); + if (tuple_type) + { + if (tuple_type->haveExplicitNames()) + { + const auto & names = tuple_type->getElementNames(); + for (size_t tuple_i = 0; tuple_i < names.size(); ++tuple_i) + { + const auto & name = names[tuple_i]; + for (size_t struct_i = 0; struct_i < orc_type->getSubtypeCount(); ++struct_i) + { + if (boost::equals(orc_type->getFieldName(struct_i), name) + || (case_insensitive_column_matching && boost::iequals(orc_type->getFieldName(struct_i), name))) + { + updateIncludeIndices( + tuple_type->getElement(tuple_i), + orc_type->getSubtype(struct_i), + case_insensitive_column_matching, + column_indices); + break; + } + } + } + } + else + { + for (size_t i = 0; i < tuple_type->getElements().size() && i < orc_type->getSubtypeCount(); ++i) + updateIncludeIndices( + tuple_type->getElement(i), orc_type->getSubtype(i), case_insensitive_column_matching, column_indices); + } + } + return; + } + default: + return; + } +} + NativeORCBlockInputFormat::NativeORCBlockInputFormat(ReadBuffer & in_, Block header_, const FormatSettings & format_settings_) : IInputFormat(std::move(header_), &in_), format_settings(format_settings_), skip_stripes(format_settings.orc.skip_stripes) { @@ -727,15 +838,39 @@ void NativeORCBlockInputFormat::prepareFileReader() format_settings.null_as_default, format_settings.orc.case_insensitive_column_matching); - const bool ignore_case = format_settings.orc.case_insensitive_column_matching; - std::unordered_set nested_table_names = Nested::getAllTableNames(getPort().getHeader(), ignore_case); - for (size_t i = 0; i < schema.columns(); ++i) + const bool ignore_case = format_settings.orc.case_insensitive_column_matching; + std::vector columns; + std::map id_type_map; + std::map name_id_map; + buildORCTypeNameIdMap(&file_reader->getType(), columns, ignore_case, id_type_map, name_id_map); + + // 
std::cout << "subtypes:" << file_reader->getType().getSubtypeCount() << std::endl; + // std::cout << "id type map" << std::endl; + // for (const auto & [k, v]: id_type_map) + // std::cout << "id:" << k << ", type:" << v->toString() << std::endl; + // std::cout << "name id map" << std::endl; + // for (const auto & [k, v]: name_id_map) + // std::cout << "name:" << k << ", id:" << v << std::endl; + + const auto & header = getPort().getHeader(); + std::unordered_set column_indices; + for (const auto & column : header) { - const auto & name = schema.getByPosition(i).name; - if (getPort().getHeader().has(name, ignore_case) || nested_table_names.contains(ignore_case ? boost::to_lower_copy(name) : name)) - include_indices.push_back(static_cast(i)); + auto name = column.name; + if (ignore_case) + boost::to_lower(name); + + if (name_id_map.contains(name)) + { + auto id = name_id_map[name]; + if (id_type_map.contains(id)) + { + updateIncludeIndices(column.type, id_type_map[id], ignore_case, column_indices); + } + } } + include_indices.assign(column_indices.begin(), column_indices.end()); if (format_settings.orc.filter_push_down && key_condition && !sarg) { @@ -816,6 +951,7 @@ Chunk NativeORCBlockInputFormat::generate() Chunk res; size_t num_rows = batch->numElements; const auto & schema = stripe_reader->getSelectedType(); + // std::cout << "output schema:" << schema.toString() << std::endl; orc_column_to_ch_column->orcTableToCHChunk(res, &schema, batch.get(), num_rows, &block_missing_values); approx_bytes_read_for_chunk = num_rows * current_stripe_info->getLength() / current_stripe_info->getNumberOfRows(); @@ -1376,8 +1512,8 @@ static ColumnWithTypeAndName readColumnFromORCColumn( Columns tuple_elements; DataTypes tuple_types; std::vector tuple_names; - const auto * tuple_type_hint = type_hint ? typeid_cast(type_hint.get()) : nullptr; + const auto * tuple_type_hint = type_hint ? 
typeid_cast(type_hint.get()) : nullptr; const auto * orc_struct_column = dynamic_cast(orc_column); for (size_t i = 0; i < orc_type->getSubtypeCount(); ++i) { From 8954b806b49b00f3c2c5a53b04ca80c31eb2e68c Mon Sep 17 00:00:00 2001 From: taiyang-li <654010905@qq.com> Date: Mon, 30 Oct 2023 20:08:09 +0800 Subject: [PATCH 09/99] add uts --- .../02906_orc_tuple_field_prune.reference | 108 ++++++++++++++++++ .../02906_orc_tuple_field_prune.sql | 38 ++++++ 2 files changed, 146 insertions(+) create mode 100644 tests/queries/0_stateless/02906_orc_tuple_field_prune.reference create mode 100644 tests/queries/0_stateless/02906_orc_tuple_field_prune.sql diff --git a/tests/queries/0_stateless/02906_orc_tuple_field_prune.reference b/tests/queries/0_stateless/02906_orc_tuple_field_prune.reference new file mode 100644 index 00000000000..dfdd38f5e8e --- /dev/null +++ b/tests/queries/0_stateless/02906_orc_tuple_field_prune.reference @@ -0,0 +1,108 @@ +int64_column Nullable(Int64) +string_column Nullable(String) +float64_column Nullable(Float64) +tuple_column Tuple(a Nullable(String), b Nullable(Float64), c Nullable(Int64)) +array_tuple_column Array(Tuple(a Nullable(String), b Nullable(Float64), c Nullable(Int64))) +map_tuple_column Map(String, Tuple(a Nullable(String), b Nullable(Float64), c Nullable(Int64))) +-- { echoOn } +-- Test primitive types +select int64_column, string_column, float64_column from file('02906.orc') where int64_column % 15 = 0; +0 0 0 +15 15 15 +30 30 30 +45 45 45 +60 60 60 +75 75 75 +90 90 90 +-- Test tuple type with names +select tuple_column from file('02906.orc', 'ORC', 'int64_column Int64, tuple_column Tuple(a Nullable(String), b Nullable(Float64), c Nullable(Int64))') where int64_column % 15 = 0; +(NULL,NULL,NULL) +('15',15,15) +(NULL,NULL,NULL) +('45',45,45) +(NULL,NULL,NULL) +('75',75,75) +(NULL,NULL,NULL) +select tuple_column from file('02906.orc', 'ORC', 'int64_column Int64, tuple_column Tuple(c Nullable(Int64))') where int64_column % 15 = 0; +(NULL) 
+(15) +(NULL) +(45) +(NULL) +(75) +(NULL) +select tuple_column from file('02906.orc', 'ORC', 'int64_column Int64, tuple_column Tuple(c Nullable(Int64), d Nullable(String))') where int64_column % 15 = 0; +(NULL,NULL) +(15,NULL) +(NULL,NULL) +(45,NULL) +(NULL,NULL) +(75,NULL) +(NULL,NULL) +-- Test tuple type without names +select tuple_column from file('02906.orc', 'ORC', 'int64_column Int64, tuple_column Tuple(Nullable(String), Nullable(Float64), Nullable(Int64))') where int64_column % 15 = 0; +(NULL,NULL,NULL) +('15',15,15) +(NULL,NULL,NULL) +('45',45,45) +(NULL,NULL,NULL) +('75',75,75) +(NULL,NULL,NULL) +select tuple_column from file('02906.orc', 'ORC', 'int64_column Int64, tuple_column Tuple(Nullable(String), Nullable(Float64))') where int64_column % 15 = 0; +(NULL,NULL) +('15',15) +(NULL,NULL) +('45',45) +(NULL,NULL) +('75',75) +(NULL,NULL) +-- Test tuple nested in array +select array_tuple_column from file('02906.orc', 'ORC', 'int64_column Int64, array_tuple_column Array(Tuple(a Nullable(String), b Nullable(Float64), c Nullable(Int64)))') where int64_column % 15 = 0; +[(NULL,NULL,NULL)] +[('15',15,15)] +[(NULL,NULL,NULL)] +[('45',45,45)] +[(NULL,NULL,NULL)] +[('75',75,75)] +[(NULL,NULL,NULL)] +select array_tuple_column from file('02906.orc', 'ORC', 'int64_column Int64, array_tuple_column Array(Tuple(b Nullable(Float64), c Nullable(Int64)))') where int64_column % 15 = 0; +[(NULL,NULL)] +[(15,15)] +[(NULL,NULL)] +[(45,45)] +[(NULL,NULL)] +[(75,75)] +[(NULL,NULL)] +select array_tuple_column from file('02906.orc', 'ORC', 'int64_column Int64, array_tuple_column Array(Tuple(b Nullable(Float64), c Nullable(Int64), d Nullable(String)))') where int64_column % 15 = 0; +[(NULL,NULL,NULL)] +[(15,15,NULL)] +[(NULL,NULL,NULL)] +[(45,45,NULL)] +[(NULL,NULL,NULL)] +[(75,75,NULL)] +[(NULL,NULL,NULL)] +-- Test tuple nested in map +select map_tuple_column from file('02906.orc', 'ORC', 'int64_column Int64, map_tuple_column Map(String, Tuple(a Nullable(String), b Nullable(Float64), 
c Nullable(Int64)))') where int64_column % 15 = 0; +{'0':(NULL,NULL,NULL)} +{'15':('15',15,15)} +{'30':(NULL,NULL,NULL)} +{'45':('45',45,45)} +{'60':(NULL,NULL,NULL)} +{'75':('75',75,75)} +{'90':(NULL,NULL,NULL)} +select map_tuple_column from file('02906.orc', 'ORC', 'int64_column Int64, map_tuple_column Map(String, Tuple(b Nullable(Float64), c Nullable(Int64)))') where int64_column % 15 = 0; +{'0':(NULL,NULL)} +{'15':(15,15)} +{'30':(NULL,NULL)} +{'45':(45,45)} +{'60':(NULL,NULL)} +{'75':(75,75)} +{'90':(NULL,NULL)} +select map_tuple_column from file('02906.orc', 'ORC', 'int64_column Int64, map_tuple_column Map(String, Tuple(b Nullable(Float64), c Nullable(Int64), d Nullable(String)))') where int64_column % 15 = 0; +{'0':(NULL,NULL,NULL)} +{'15':(15,15,NULL)} +{'30':(NULL,NULL,NULL)} +{'45':(45,45,NULL)} +{'60':(NULL,NULL,NULL)} +{'75':(75,75,NULL)} +{'90':(NULL,NULL,NULL)} diff --git a/tests/queries/0_stateless/02906_orc_tuple_field_prune.sql b/tests/queries/0_stateless/02906_orc_tuple_field_prune.sql new file mode 100644 index 00000000000..a7f2c31d3e1 --- /dev/null +++ b/tests/queries/0_stateless/02906_orc_tuple_field_prune.sql @@ -0,0 +1,38 @@ +set engine_file_truncate_on_insert = 1; +set flatten_nested = 0; + +insert into function file('02906.orc') +select + number::Int64 as int64_column, + number::String as string_column, + number::Float64 as float64_column, + cast(if(number % 10 = 0, tuple(null, null, null), tuple(number::String, number::Float64, number::Int64)) as Tuple(a Nullable(String), b Nullable(Float64), c Nullable(Int64))) as tuple_column, + cast(if(number % 10 = 0, array(tuple(null, null, null)), array(tuple(number::String, number::Float64, number::Int64))) as Array(Tuple(a Nullable(String), b Nullable(Float64), c Nullable(Int64)))) as array_tuple_column, + cast(if(number % 10 = 0, map(number::String, tuple(null, null, null)), map(number::String, tuple(number::String, number::Float64, number::Int64))) as Map(String, Tuple(a Nullable(String), b 
Nullable(Float64), c Nullable(Int64)))) as map_tuple_column + from numbers(100); + +desc file('02906.orc'); + +-- { echoOn } +-- Test primitive types +select int64_column, string_column, float64_column from file('02906.orc') where int64_column % 15 = 0; + +-- Test tuple type with names +select tuple_column from file('02906.orc', 'ORC', 'int64_column Int64, tuple_column Tuple(a Nullable(String), b Nullable(Float64), c Nullable(Int64))') where int64_column % 15 = 0; +select tuple_column from file('02906.orc', 'ORC', 'int64_column Int64, tuple_column Tuple(c Nullable(Int64))') where int64_column % 15 = 0; +select tuple_column from file('02906.orc', 'ORC', 'int64_column Int64, tuple_column Tuple(c Nullable(Int64), d Nullable(String))') where int64_column % 15 = 0; + +-- Test tuple type without names +select tuple_column from file('02906.orc', 'ORC', 'int64_column Int64, tuple_column Tuple(Nullable(String), Nullable(Float64), Nullable(Int64))') where int64_column % 15 = 0; +select tuple_column from file('02906.orc', 'ORC', 'int64_column Int64, tuple_column Tuple(Nullable(String), Nullable(Float64))') where int64_column % 15 = 0; + +-- Test tuple nested in array +select array_tuple_column from file('02906.orc', 'ORC', 'int64_column Int64, array_tuple_column Array(Tuple(a Nullable(String), b Nullable(Float64), c Nullable(Int64)))') where int64_column % 15 = 0; +select array_tuple_column from file('02906.orc', 'ORC', 'int64_column Int64, array_tuple_column Array(Tuple(b Nullable(Float64), c Nullable(Int64)))') where int64_column % 15 = 0; +select array_tuple_column from file('02906.orc', 'ORC', 'int64_column Int64, array_tuple_column Array(Tuple(b Nullable(Float64), c Nullable(Int64), d Nullable(String)))') where int64_column % 15 = 0; + +-- Test tuple nested in map +select map_tuple_column from file('02906.orc', 'ORC', 'int64_column Int64, map_tuple_column Map(String, Tuple(a Nullable(String), b Nullable(Float64), c Nullable(Int64)))') where int64_column % 15 = 0; +select 
map_tuple_column from file('02906.orc', 'ORC', 'int64_column Int64, map_tuple_column Map(String, Tuple(b Nullable(Float64), c Nullable(Int64)))') where int64_column % 15 = 0; +select map_tuple_column from file('02906.orc', 'ORC', 'int64_column Int64, map_tuple_column Map(String, Tuple(b Nullable(Float64), c Nullable(Int64), d Nullable(String)))') where int64_column % 15 = 0; +-- { echoOff } From 38f24c04558e4528ca1d9cf92ba170c400177569 Mon Sep 17 00:00:00 2001 From: taiyang-li <654010905@qq.com> Date: Mon, 30 Oct 2023 20:29:43 +0800 Subject: [PATCH 10/99] add performance tests --- tests/performance/orc_tuple_field_prune.xml | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) create mode 100644 tests/performance/orc_tuple_field_prune.xml diff --git a/tests/performance/orc_tuple_field_prune.xml b/tests/performance/orc_tuple_field_prune.xml new file mode 100644 index 00000000000..2bcd15c8635 --- /dev/null +++ b/tests/performance/orc_tuple_field_prune.xml @@ -0,0 +1,17 @@ + + + 1 + 10000 + 0 + + + + insert into function file('test_orc_tfp.orc') select * from generateRandom('int64_column Nullable(Int64), tuple_column Tuple(a Nullable(String), b Nullable(Float64), c Nullable(Int64)), array_tuple_column Array(Tuple(a Nullable(String), b Nullable(Float64), c Nullable(Int64))), map_tuple_column Map(String, Tuple(a Nullable(String), b Nullable(Float64), c Nullable(Int64)))') limit 1000000 + + + DROP TABLE IF EXISTS test_orc_tfp + + select * from file('test_orc_tfp.orc', 'ORC', 'int64_column Int64, tuple_column Tuple(c Nullable(Int64))') format Null + select * from file('test_orc_tfp.orc', 'ORC', 'int64_column Int64, array_tuple_column Array(Tuple(c Nullable(Int64)))') format Null + select * from file('test_orc_tfp.orc', 'ORC', 'int64_column Int64, map_tuple_column Map(String, Tuple(c Nullable(Int64)))') format Null + \ No newline at end of file From aef9ce0cf06f4541ca624e8b6711bf4575a20f40 Mon Sep 17 00:00:00 2001 From: taiyang-li <654010905@qq.com> Date: Mon, 30 Oct 
2023 20:30:02 +0800 Subject: [PATCH 11/99] update orc version --- contrib/orc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/contrib/orc b/contrib/orc index f31c271110a..2c31e314e4e 160000 --- a/contrib/orc +++ b/contrib/orc @@ -1 +1 @@ -Subproject commit f31c271110a2f0dac908a152f11708193ae209ee +Subproject commit 2c31e314e4e36dcb1c58ca1cd7454fc4685af997 From 423df126254980d8a05e15659a8b45a479a8a5be Mon Sep 17 00:00:00 2001 From: taiyang-li <654010905@qq.com> Date: Mon, 30 Oct 2023 20:38:56 +0800 Subject: [PATCH 12/99] update orc version --- contrib/orc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/contrib/orc b/contrib/orc index 2c31e314e4e..5f8db0fb0a4 160000 --- a/contrib/orc +++ b/contrib/orc @@ -1 +1 @@ -Subproject commit 2c31e314e4e36dcb1c58ca1cd7454fc4685af997 +Subproject commit 5f8db0fb0a47fbc4902bf9d7f712e65309f13d2d From 1e5703a77b50a1fb4f54afcaf0c322cdf989c3d7 Mon Sep 17 00:00:00 2001 From: taiyang-li <654010905@qq.com> Date: Mon, 30 Oct 2023 20:41:45 +0800 Subject: [PATCH 13/99] upgrade orc version --- contrib/orc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/contrib/orc b/contrib/orc index 5f8db0fb0a4..5046972fbab 160000 --- a/contrib/orc +++ b/contrib/orc @@ -1 +1 @@ -Subproject commit 5f8db0fb0a47fbc4902bf9d7f712e65309f13d2d +Subproject commit 5046972fbabfe3cdf77a8768228793c7c0a61085 From f53fdbeeadf7a2be2fa962e0ee0be91628348b93 Mon Sep 17 00:00:00 2001 From: avogar Date: Mon, 30 Oct 2023 19:11:03 +0000 Subject: [PATCH 14/99] Fix vuild for s390 --- contrib/arrow | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/contrib/arrow b/contrib/arrow index 9d9c464ce68..8cdbf43f78a 160000 --- a/contrib/arrow +++ b/contrib/arrow @@ -1 +1 @@ -Subproject commit 9d9c464ce6883f52aaca9f913eec4cd50006c767 +Subproject commit 8cdbf43f78ad02615aef29dc7f9af0dea22a03e4 From e5db57204d42b578296a1d2f022f38641d702be9 Mon Sep 17 00:00:00 2001 From: taiyang-li <654010905@qq.com> Date: Tue, 31 Oct 2023 
11:57:47 +0800 Subject: [PATCH 15/99] fix bugs --- contrib/orc | 2 +- .../Impl/NativeORCBlockInputFormat.cpp | 26 +++++++++---------- .../02906_orc_tuple_field_prune.sql | 2 +- 3 files changed, 15 insertions(+), 15 deletions(-) diff --git a/contrib/orc b/contrib/orc index 5046972fbab..e24f2c2a3ca 160000 --- a/contrib/orc +++ b/contrib/orc @@ -1 +1 @@ -Subproject commit 5046972fbabfe3cdf77a8768228793c7c0a61085 +Subproject commit e24f2c2a3ca0769c96704ab20ad6f512a83ea2ad diff --git a/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp b/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp index 0af4428b5f0..88b3fbeee2b 100644 --- a/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp +++ b/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp @@ -52,19 +52,19 @@ ORCInputStream::ORCInputStream(SeekableReadBuffer & in_, size_t file_size_) : in { } -uint64_t ORCInputStream::getLength() const +UInt64 ORCInputStream::getLength() const { return file_size; } -uint64_t ORCInputStream::getNaturalReadSize() const +UInt64 ORCInputStream::getNaturalReadSize() const { return 128 * 1024; } -void ORCInputStream::read(void * buf, uint64_t length, uint64_t offset) +void ORCInputStream::read(void * buf, UInt64 length, UInt64 offset) { - if (offset != static_cast(in.getPosition())) + if (offset != static_cast(in.getPosition())) in.seek(offset, SEEK_SET); in.readStrict(reinterpret_cast(buf), length); @@ -102,7 +102,7 @@ std::unique_ptr asORCInputStreamLoadIntoMemory(ReadBuffer & in static const orc::Type * getORCTypeByName(const orc::Type & schema, const String & name, bool case_insensitive_column_matching) { - for (uint64_t i = 0; i != schema.getSubtypeCount(); ++i) + for (UInt64 i = 0; i != schema.getSubtypeCount(); ++i) if (boost::equals(schema.getFieldName(i), name) || (case_insensitive_column_matching && boost::iequals(schema.getFieldName(i), name))) return schema.getSubtype(i); @@ -694,8 +694,8 @@ static void buildORCTypeNameIdMap( const orc::Type * orc_type, 
std::vector & columns, bool case_insensitive_column_matching, - std::map & id_type_map, - std::map & name_id_map) + std::map & id_type_map, + std::map & name_id_map) { id_type_map[orc_type->getColumnId()] = orc_type; if (orc::STRUCT == orc_type->getKind()) @@ -841,8 +841,8 @@ void NativeORCBlockInputFormat::prepareFileReader() const bool ignore_case = format_settings.orc.case_insensitive_column_matching; std::vector columns; - std::map id_type_map; - std::map name_id_map; + std::map id_type_map; + std::map name_id_map; buildORCTypeNameIdMap(&file_reader->getType(), columns, ignore_case, id_type_map, name_id_map); // std::cout << "subtypes:" << file_reader->getType().getSubtypeCount() << std::endl; @@ -854,7 +854,7 @@ void NativeORCBlockInputFormat::prepareFileReader() // std::cout << "name:" << k << ", id:" << v << std::endl; const auto & header = getPort().getHeader(); - std::unordered_set column_indices; + std::unordered_set include_typeids; for (const auto & column : header) { auto name = column.name; @@ -866,11 +866,11 @@ void NativeORCBlockInputFormat::prepareFileReader() auto id = name_id_map[name]; if (id_type_map.contains(id)) { - updateIncludeIndices(column.type, id_type_map[id], ignore_case, column_indices); + updateIncludeIndices(column.type, id_type_map[id], ignore_case, include_typeids); } } } - include_indices.assign(column_indices.begin(), column_indices.end()); + include_indices.assign(include_typeids.begin(), include_typeids.end()); if (format_settings.orc.filter_push_down && key_condition && !sarg) { @@ -895,7 +895,7 @@ bool NativeORCBlockInputFormat::prepareStripeReader() throw Exception(ErrorCodes::INCORRECT_DATA, "ORC stripe {} has no rows", current_stripe); orc::RowReaderOptions row_reader_options; - row_reader_options.include(include_indices); + row_reader_options.includeTypes(include_indices); row_reader_options.range(current_stripe_info->getOffset(), current_stripe_info->getLength()); if (format_settings.orc.filter_push_down && sarg) { diff 
--git a/tests/queries/0_stateless/02906_orc_tuple_field_prune.sql b/tests/queries/0_stateless/02906_orc_tuple_field_prune.sql index a7f2c31d3e1..834caa1da53 100644 --- a/tests/queries/0_stateless/02906_orc_tuple_field_prune.sql +++ b/tests/queries/0_stateless/02906_orc_tuple_field_prune.sql @@ -1,7 +1,7 @@ set engine_file_truncate_on_insert = 1; set flatten_nested = 0; -insert into function file('02906.orc') +insert into function file('02906.orc', 'ORC') select number::Int64 as int64_column, number::String as string_column, From c97b2c5be74a73305a2c0dbc905dc59ac77c0fd3 Mon Sep 17 00:00:00 2001 From: taiyang-li <654010905@qq.com> Date: Tue, 31 Oct 2023 12:00:45 +0800 Subject: [PATCH 16/99] fix code style --- .../Impl/NativeORCBlockInputFormat.cpp | 24 +++++++++---------- tests/performance/orc_tuple_field_prune.xml | 2 +- 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp b/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp index 88b3fbeee2b..fd0f4ee0ca0 100644 --- a/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp +++ b/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp @@ -747,13 +747,13 @@ static void getFileReaderAndSchema( } } -static void updateIncludeIndices( - DataTypePtr type, const orc::Type * orc_type, bool case_insensitive_column_matching, std::unordered_set & column_indices) +static void updateIncludeTypeIds( + DataTypePtr type, const orc::Type * orc_type, bool case_insensitive_column_matching, std::unordered_set & include_typeids) { /// Primitive types if (orc_type->getSubtypeCount() == 0) { - column_indices.insert(orc_type->getColumnId()); + include_typeids.insert(orc_type->getColumnId()); return; } @@ -764,8 +764,8 @@ static void updateIncludeIndices( const auto * array_type = typeid_cast(non_nullable_type.get()); if (array_type) { - updateIncludeIndices( - array_type->getNestedType(), orc_type->getSubtype(0), case_insensitive_column_matching, 
column_indices); + updateIncludeTypeIds( + array_type->getNestedType(), orc_type->getSubtype(0), case_insensitive_column_matching, include_typeids); } return; } @@ -773,8 +773,8 @@ static void updateIncludeIndices( const auto * map_type = typeid_cast(non_nullable_type.get()); if (map_type) { - updateIncludeIndices(map_type->getKeyType(), orc_type->getSubtype(0), case_insensitive_column_matching, column_indices); - updateIncludeIndices(map_type->getValueType(), orc_type->getSubtype(1), case_insensitive_column_matching, column_indices); + updateIncludeTypeIds(map_type->getKeyType(), orc_type->getSubtype(0), case_insensitive_column_matching, include_typeids); + updateIncludeTypeIds(map_type->getValueType(), orc_type->getSubtype(1), case_insensitive_column_matching, include_typeids); } return; } @@ -793,11 +793,11 @@ static void updateIncludeIndices( if (boost::equals(orc_type->getFieldName(struct_i), name) || (case_insensitive_column_matching && boost::iequals(orc_type->getFieldName(struct_i), name))) { - updateIncludeIndices( + updateIncludeTypeIds( tuple_type->getElement(tuple_i), orc_type->getSubtype(struct_i), case_insensitive_column_matching, - column_indices); + include_typeids); break; } } @@ -806,8 +806,8 @@ static void updateIncludeIndices( else { for (size_t i = 0; i < tuple_type->getElements().size() && i < orc_type->getSubtypeCount(); ++i) - updateIncludeIndices( - tuple_type->getElement(i), orc_type->getSubtype(i), case_insensitive_column_matching, column_indices); + updateIncludeTypeIds( + tuple_type->getElement(i), orc_type->getSubtype(i), case_insensitive_column_matching, include_typeids); } } return; @@ -866,7 +866,7 @@ void NativeORCBlockInputFormat::prepareFileReader() auto id = name_id_map[name]; if (id_type_map.contains(id)) { - updateIncludeIndices(column.type, id_type_map[id], ignore_case, include_typeids); + updateIncludeTypeIds(column.type, id_type_map[id], ignore_case, include_typeids); } } } diff --git 
a/tests/performance/orc_tuple_field_prune.xml b/tests/performance/orc_tuple_field_prune.xml index 2bcd15c8635..4e338733329 100644 --- a/tests/performance/orc_tuple_field_prune.xml +++ b/tests/performance/orc_tuple_field_prune.xml @@ -6,7 +6,7 @@ - insert into function file('test_orc_tfp.orc') select * from generateRandom('int64_column Nullable(Int64), tuple_column Tuple(a Nullable(String), b Nullable(Float64), c Nullable(Int64)), array_tuple_column Array(Tuple(a Nullable(String), b Nullable(Float64), c Nullable(Int64))), map_tuple_column Map(String, Tuple(a Nullable(String), b Nullable(Float64), c Nullable(Int64)))') limit 1000000 + insert into function file('test_orc_tfp.orc', 'ORC') select * from generateRandom('int64_column Nullable(Int64), tuple_column Tuple(a Nullable(String), b Nullable(Float64), c Nullable(Int64)), array_tuple_column Array(Tuple(a Nullable(String), b Nullable(Float64), c Nullable(Int64))), map_tuple_column Map(String, Tuple(a Nullable(String), b Nullable(Float64), c Nullable(Int64)))') limit 1000000 DROP TABLE IF EXISTS test_orc_tfp From 5e21d2459a00b38601bb2c5709795a9bda72fa65 Mon Sep 17 00:00:00 2001 From: taiyang-li <654010905@qq.com> Date: Tue, 31 Oct 2023 14:19:51 +0800 Subject: [PATCH 17/99] fix failed fast test --- tests/queries/0_stateless/02906_orc_tuple_field_prune.sql | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/queries/0_stateless/02906_orc_tuple_field_prune.sql b/tests/queries/0_stateless/02906_orc_tuple_field_prune.sql index 834caa1da53..5428abc40de 100644 --- a/tests/queries/0_stateless/02906_orc_tuple_field_prune.sql +++ b/tests/queries/0_stateless/02906_orc_tuple_field_prune.sql @@ -1,3 +1,5 @@ +-- Tags: no-fasttest, no-parallel + set engine_file_truncate_on_insert = 1; set flatten_nested = 0; From 7c5a7fc03aa5a6f2675c9769976413d03f2b9f01 Mon Sep 17 00:00:00 2001 From: taiyang-li <654010905@qq.com> Date: Tue, 31 Oct 2023 17:17:21 +0800 Subject: [PATCH 18/99] update orc version --- contrib/orc | 2 +- 1 file 
changed, 1 insertion(+), 1 deletion(-) diff --git a/contrib/orc b/contrib/orc index e24f2c2a3ca..f31c271110a 160000 --- a/contrib/orc +++ b/contrib/orc @@ -1 +1 @@ -Subproject commit e24f2c2a3ca0769c96704ab20ad6f512a83ea2ad +Subproject commit f31c271110a2f0dac908a152f11708193ae209ee From b27658742223e750902ec3f181d2a662fc7bba1f Mon Sep 17 00:00:00 2001 From: taiyang-li <654010905@qq.com> Date: Wed, 1 Nov 2023 15:43:20 +0800 Subject: [PATCH 19/99] fix failed uts --- .../Impl/NativeORCBlockInputFormat.cpp | 130 ++++++++++++------ 1 file changed, 90 insertions(+), 40 deletions(-) diff --git a/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp b/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp index fd0f4ee0ca0..2c3db6432c8 100644 --- a/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp +++ b/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp @@ -100,11 +100,11 @@ std::unique_ptr asORCInputStreamLoadIntoMemory(ReadBuffer & in return std::make_unique(std::move(file_data), file_size); } -static const orc::Type * getORCTypeByName(const orc::Type & schema, const String & name, bool case_insensitive_column_matching) +static const orc::Type * getORCTypeByName(const orc::Type & schema, const String & name, bool ignore_case) { for (UInt64 i = 0; i != schema.getSubtypeCount(); ++i) if (boost::equals(schema.getFieldName(i), name) - || (case_insensitive_column_matching && boost::iequals(schema.getFieldName(i), name))) + || (ignore_case && boost::iequals(schema.getFieldName(i), name))) return schema.getSubtype(i); return nullptr; } @@ -690,10 +690,10 @@ static std::string toDotColumnPath(const std::vector & columns) return column_path.substr(0, column_path.length() - 1); } -static void buildORCTypeNameIdMap( +[[maybe_unused]] static void buildORCTypeNameIdMap( const orc::Type * orc_type, std::vector & columns, - bool case_insensitive_column_matching, + bool ignore_case, std::map & id_type_map, std::map & name_id_map) { @@ -705,10 +705,10 @@ static 
void buildORCTypeNameIdMap( const std::string & field_name = orc_type->getFieldName(i); columns.push_back(field_name); auto column_path = toDotColumnPath(columns); - if (case_insensitive_column_matching) + if (ignore_case) boost::to_lower(column_path); name_id_map[column_path] = orc_type->getSubtype(i)->getColumnId(); - buildORCTypeNameIdMap(orc_type->getSubtype(i), columns, case_insensitive_column_matching, id_type_map, name_id_map); + buildORCTypeNameIdMap(orc_type->getSubtype(i), columns, ignore_case, id_type_map, name_id_map); columns.pop_back(); } } @@ -716,7 +716,7 @@ static void buildORCTypeNameIdMap( { // other non-primitive type for (size_t j = 0; j < orc_type->getSubtypeCount(); ++j) - buildORCTypeNameIdMap(orc_type->getSubtype(j), columns, case_insensitive_column_matching, id_type_map, name_id_map); + buildORCTypeNameIdMap(orc_type->getSubtype(j), columns, ignore_case, id_type_map, name_id_map); } } @@ -747,10 +747,54 @@ static void getFileReaderAndSchema( } } -static void updateIncludeTypeIds( - DataTypePtr type, const orc::Type * orc_type, bool case_insensitive_column_matching, std::unordered_set & include_typeids) +static const orc::Type * traverseDownORCTypeByName( + const std::string & target, + const orc::Type * orc_type, + DataTypePtr & type, + bool ignore_case) { - /// Primitive types + // std::cout << "target:" << target << ", orc_type:" << orc_type->toString() << ", type:" << type->getName() << std::endl; + if (target.empty()) + return orc_type; + + auto split = Nested::splitName(target); + if (orc::STRUCT == orc_type->getKind()) + { + const auto * orc_field_type = getORCTypeByName(*orc_type, split.first, ignore_case); + return orc_field_type ? traverseDownORCTypeByName(split.second, orc_field_type, type, ignore_case) : nullptr; + } + else if (orc::LIST == orc_type->getKind()) + { + /// For cases in which header contains subcolumns flattened from nested columns. 
+ /// For example, "a Nested(x String, y Int64)" is flattened to "a.x Array(String), a.y Array(Int64)", and orc file schema is still "a array>". + /// In this case, we should skip possible array type and traverse down to its nested struct type. + const auto * array_type = typeid_cast(removeNullable(type).get()); + const auto * orc_nested_type = orc_type->getSubtype(0); + if (array_type && orc::STRUCT == orc_nested_type->getKind()) + { + const auto * orc_field_type = getORCTypeByName(*orc_nested_type, split.first, ignore_case); + if (orc_field_type) + { + /// Avoid inconsistency between CH and ORC type brought by flattened Nested type. + type = array_type->getNestedType(); + return traverseDownORCTypeByName(split.second, orc_field_type, type, ignore_case); + } + else + return nullptr; + } + else + return nullptr; + } + else + return nullptr; +} + +static void updateIncludeTypeIds( + DataTypePtr type, const orc::Type * orc_type, bool ignore_case, std::unordered_set & include_typeids) +{ + // std::cout << "ch type:" << type->getName() << ", orc_type:" << orc_type->toString() << std::endl; + + /// For primitive types, directly append column id into result if (orc_type->getSubtypeCount() == 0) { include_typeids.insert(orc_type->getColumnId()); @@ -765,7 +809,7 @@ static void updateIncludeTypeIds( if (array_type) { updateIncludeTypeIds( - array_type->getNestedType(), orc_type->getSubtype(0), case_insensitive_column_matching, include_typeids); + array_type->getNestedType(), orc_type->getSubtype(0), ignore_case, include_typeids); } return; } @@ -773,8 +817,8 @@ static void updateIncludeTypeIds( const auto * map_type = typeid_cast(non_nullable_type.get()); if (map_type) { - updateIncludeTypeIds(map_type->getKeyType(), orc_type->getSubtype(0), case_insensitive_column_matching, include_typeids); - updateIncludeTypeIds(map_type->getValueType(), orc_type->getSubtype(1), case_insensitive_column_matching, include_typeids); + updateIncludeTypeIds(map_type->getKeyType(), 
orc_type->getSubtype(0), ignore_case, include_typeids); + updateIncludeTypeIds(map_type->getValueType(), orc_type->getSubtype(1), ignore_case, include_typeids); } return; } @@ -791,12 +835,12 @@ static void updateIncludeTypeIds( for (size_t struct_i = 0; struct_i < orc_type->getSubtypeCount(); ++struct_i) { if (boost::equals(orc_type->getFieldName(struct_i), name) - || (case_insensitive_column_matching && boost::iequals(orc_type->getFieldName(struct_i), name))) + || (ignore_case && boost::iequals(orc_type->getFieldName(struct_i), name))) { updateIncludeTypeIds( tuple_type->getElement(tuple_i), orc_type->getSubtype(struct_i), - case_insensitive_column_matching, + ignore_case, include_typeids); break; } @@ -807,7 +851,7 @@ static void updateIncludeTypeIds( { for (size_t i = 0; i < tuple_type->getElements().size() && i < orc_type->getSubtypeCount(); ++i) updateIncludeTypeIds( - tuple_type->getElement(i), orc_type->getSubtype(i), case_insensitive_column_matching, include_typeids); + tuple_type->getElement(i), orc_type->getSubtype(i), ignore_case, include_typeids); } } return; @@ -838,40 +882,47 @@ void NativeORCBlockInputFormat::prepareFileReader() format_settings.null_as_default, format_settings.orc.case_insensitive_column_matching); - const bool ignore_case = format_settings.orc.case_insensitive_column_matching; - std::vector columns; - std::map id_type_map; - std::map name_id_map; - buildORCTypeNameIdMap(&file_reader->getType(), columns, ignore_case, id_type_map, name_id_map); - - // std::cout << "subtypes:" << file_reader->getType().getSubtypeCount() << std::endl; - // std::cout << "id type map" << std::endl; - // for (const auto & [k, v]: id_type_map) - // std::cout << "id:" << k << ", type:" << v->toString() << std::endl; - // std::cout << "name id map" << std::endl; - // for (const auto & [k, v]: name_id_map) - // std::cout << "name:" << k << ", id:" << v << std::endl; const auto & header = getPort().getHeader(); + const auto & file_schema = 
file_reader->getType(); std::unordered_set include_typeids; for (const auto & column : header) { - auto name = column.name; - if (ignore_case) - boost::to_lower(name); - - if (name_id_map.contains(name)) + auto split = Nested::splitName(column.name); + if (split.second.empty()) { - auto id = name_id_map[name]; - if (id_type_map.contains(id)) - { - updateIncludeTypeIds(column.type, id_type_map[id], ignore_case, include_typeids); - } + const auto * orc_type = getORCTypeByName(file_schema, column.name, ignore_case); + updateIncludeTypeIds(column.type, orc_type, ignore_case, include_typeids); + } + else + { + auto type = column.type; + const auto * orc_type = traverseDownORCTypeByName(column.name, &file_schema, type, ignore_case); + if (orc_type) + updateIncludeTypeIds(type, orc_type, ignore_case, include_typeids); } } include_indices.assign(include_typeids.begin(), include_typeids.end()); + /// Just for Debug + // std::vector tmp; + // std::map id_type_map; + // std::map name_id_map; + // buildORCTypeNameIdMap(&file_schema, tmp, ignore_case, id_type_map, name_id_map); + // std::cout << "just for debug:" << std::endl; + // std::cout << "subtypes:" << file_reader->getType().getSubtypeCount() << std::endl; + // std::cout << "ch output type:" << getPort().getHeader().dumpStructure() << std::endl; + // std::cout << "orc ouput type:" << file_reader->getType().toString() << std::endl; + // std::cout << "id type map" << std::endl; + // for (const auto & [k, v] : id_type_map) + // std::cout << "id:" << k << ", type:" << v->toString() << std::endl; + // std::cout << "name id map" << std::endl; + // for (const auto & [k, v] : name_id_map) + // std::cout << "name:" << k << ", id:" << v << std::endl; + // for (const auto & x : include_indices) + // std::cout << "choose " << x << std::endl; + if (format_settings.orc.filter_push_down && key_condition && !sarg) { sarg = buildORCSearchArgument(*key_condition, getPort().getHeader(), file_reader->getType(), format_settings); @@ -951,7 
+1002,6 @@ Chunk NativeORCBlockInputFormat::generate() Chunk res; size_t num_rows = batch->numElements; const auto & schema = stripe_reader->getSelectedType(); - // std::cout << "output schema:" << schema.toString() << std::endl; orc_column_to_ch_column->orcTableToCHChunk(res, &schema, batch.get(), num_rows, &block_missing_values); approx_bytes_read_for_chunk = num_rows * current_stripe_info->getLength() / current_stripe_info->getNumberOfRows(); From 001cbe79126ea96b8f70ae3c5e17655c73df30cd Mon Sep 17 00:00:00 2001 From: taiyang-li <654010905@qq.com> Date: Wed, 1 Nov 2023 16:58:25 +0800 Subject: [PATCH 20/99] fix typos --- src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp b/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp index 2c3db6432c8..3f98224f8aa 100644 --- a/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp +++ b/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp @@ -913,7 +913,7 @@ void NativeORCBlockInputFormat::prepareFileReader() // std::cout << "just for debug:" << std::endl; // std::cout << "subtypes:" << file_reader->getType().getSubtypeCount() << std::endl; // std::cout << "ch output type:" << getPort().getHeader().dumpStructure() << std::endl; - // std::cout << "orc ouput type:" << file_reader->getType().toString() << std::endl; + // std::cout << "orc output type:" << file_reader->getType().toString() << std::endl; // std::cout << "id type map" << std::endl; // for (const auto & [k, v] : id_type_map) // std::cout << "id:" << k << ", type:" << v->toString() << std::endl; From 24c45a4ee060c1fb2447d8acb4f0281f97ab65f1 Mon Sep 17 00:00:00 2001 From: taiyang-li <654010905@qq.com> Date: Wed, 1 Nov 2023 18:47:11 +0800 Subject: [PATCH 21/99] fix failed uts --- .../Impl/NativeORCBlockInputFormat.cpp | 20 +++++-------------- 1 file changed, 5 insertions(+), 15 deletions(-) diff --git 
a/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp b/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp index 3f98224f8aa..7a835274bb2 100644 --- a/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp +++ b/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp @@ -775,7 +775,7 @@ static const orc::Type * traverseDownORCTypeByName( const auto * orc_field_type = getORCTypeByName(*orc_nested_type, split.first, ignore_case); if (orc_field_type) { - /// Avoid inconsistency between CH and ORC type brought by flattened Nested type. + /// Adjust CH type to avoid inconsistency between CH and ORC type brought by flattened Nested type. type = array_type->getNestedType(); return traverseDownORCTypeByName(split.second, orc_field_type, type, ignore_case); } @@ -883,25 +883,15 @@ void NativeORCBlockInputFormat::prepareFileReader() format_settings.orc.case_insensitive_column_matching); const bool ignore_case = format_settings.orc.case_insensitive_column_matching; - const auto & header = getPort().getHeader(); const auto & file_schema = file_reader->getType(); std::unordered_set include_typeids; for (const auto & column : header) { - auto split = Nested::splitName(column.name); - if (split.second.empty()) - { - const auto * orc_type = getORCTypeByName(file_schema, column.name, ignore_case); - updateIncludeTypeIds(column.type, orc_type, ignore_case, include_typeids); - } - else - { - auto type = column.type; - const auto * orc_type = traverseDownORCTypeByName(column.name, &file_schema, type, ignore_case); - if (orc_type) - updateIncludeTypeIds(type, orc_type, ignore_case, include_typeids); - } + auto adjusted_type = column.type; + const auto * orc_type = traverseDownORCTypeByName(column.name, &file_schema, adjusted_type, ignore_case); + if (orc_type) + updateIncludeTypeIds(adjusted_type, orc_type, ignore_case, include_typeids); } include_indices.assign(include_typeids.begin(), include_typeids.end()); From dc897215dacea4f447b127254914e32f76ca001e Mon Sep 17 
00:00:00 2001 From: taiyang-li <654010905@qq.com> Date: Wed, 1 Nov 2023 20:42:07 +0800 Subject: [PATCH 22/99] fix failed uts tests/queries/0_stateless/02312_parquet_orc_arrow_names_tuples.sql --- .../Impl/NativeORCBlockInputFormat.cpp | 35 +++++++++++++++---- 1 file changed, 29 insertions(+), 6 deletions(-) diff --git a/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp b/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp index 7a835274bb2..9501efbabb7 100644 --- a/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp +++ b/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp @@ -757,11 +757,32 @@ static const orc::Type * traverseDownORCTypeByName( if (target.empty()) return orc_type; - auto split = Nested::splitName(target); + auto search_struct_field = [&](const std::string & target_, const orc::Type * type_) -> std::pair + { + auto target_copy = target_; + if (ignore_case) + boost::to_lower(target_copy); + + for (size_t i = 0; i < type_->getSubtypeCount(); ++i) + { + auto field_name = type_->getFieldName(i); + if (ignore_case) + boost::to_lower(field_name); + + if (startsWith(target_copy, field_name) && (target_copy.size() == field_name.size() || target_copy[field_name.size()] == '.')) + { + return {target_copy.size() == field_name.size() ? "" : target_.substr(field_name.size() + 1), type_->getSubtype(i)}; + } + } + return {"", nullptr}; + }; + if (orc::STRUCT == orc_type->getKind()) { - const auto * orc_field_type = getORCTypeByName(*orc_type, split.first, ignore_case); - return orc_field_type ? traverseDownORCTypeByName(split.second, orc_field_type, type, ignore_case) : nullptr; + auto next_type_and_target = search_struct_field(target, orc_type); + const auto & next_target = next_type_and_target.first; + const auto * next_orc_type = next_type_and_target.second; + return next_orc_type ? 
traverseDownORCTypeByName(next_target, next_orc_type, type, ignore_case) : nullptr; } else if (orc::LIST == orc_type->getKind()) { @@ -772,12 +793,14 @@ static const orc::Type * traverseDownORCTypeByName( const auto * orc_nested_type = orc_type->getSubtype(0); if (array_type && orc::STRUCT == orc_nested_type->getKind()) { - const auto * orc_field_type = getORCTypeByName(*orc_nested_type, split.first, ignore_case); - if (orc_field_type) + auto next_type_and_target = search_struct_field(target, orc_nested_type); + const auto & next_target = next_type_and_target.first; + const auto * next_orc_type = next_type_and_target.second; + if (next_orc_type) { /// Adjust CH type to avoid inconsistency between CH and ORC type brought by flattened Nested type. type = array_type->getNestedType(); - return traverseDownORCTypeByName(split.second, orc_field_type, type, ignore_case); + return traverseDownORCTypeByName(next_target, next_orc_type, type, ignore_case); } else return nullptr; From c899ff9da2a8d0b22eba33471cd9bbb183021e73 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E6=9D=8E=E6=89=AC?= <654010905@qq.com> Date: Thu, 2 Nov 2023 10:30:14 +0800 Subject: [PATCH 23/99] Apply suggestions from code review Co-authored-by: Kruglov Pavel <48961922+Avogar@users.noreply.github.com> --- src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp b/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp index 9501efbabb7..542d13d9363 100644 --- a/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp +++ b/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp @@ -779,9 +779,7 @@ static const orc::Type * traverseDownORCTypeByName( if (orc::STRUCT == orc_type->getKind()) { - auto next_type_and_target = search_struct_field(target, orc_type); - const auto & next_target = next_type_and_target.first; - const auto * next_orc_type = next_type_and_target.second; + 
const auto [next_target, next_orc_type]= search_struct_field(target, orc_type); return next_orc_type ? traverseDownORCTypeByName(next_target, next_orc_type, type, ignore_case) : nullptr; } else if (orc::LIST == orc_type->getKind()) From 424f1bec933c580d2dfa2101f58fd26004599139 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E6=9D=8E=E6=89=AC?= <654010905@qq.com> Date: Thu, 2 Nov 2023 10:30:28 +0800 Subject: [PATCH 24/99] Apply suggestions from code review Co-authored-by: Kruglov Pavel <48961922+Avogar@users.noreply.github.com> --- src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp b/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp index 542d13d9363..1cbb3b07c00 100644 --- a/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp +++ b/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp @@ -800,14 +800,9 @@ static const orc::Type * traverseDownORCTypeByName( type = array_type->getNestedType(); return traverseDownORCTypeByName(next_target, next_orc_type, type, ignore_case); } - else - return nullptr; } - else - return nullptr; } - else - return nullptr; + return nullptr; } static void updateIncludeTypeIds( From b142489c3c011a58e547bc4102fa79d452f4e712 Mon Sep 17 00:00:00 2001 From: taiyang-li <654010905@qq.com> Date: Thu, 2 Nov 2023 10:49:18 +0800 Subject: [PATCH 25/99] fix code style --- tests/performance/orc_filter_push_down.xml | 2 +- tests/performance/orc_tuple_field_prune.xml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/performance/orc_filter_push_down.xml b/tests/performance/orc_filter_push_down.xml index 9f49c20a075..318c6eca991 100644 --- a/tests/performance/orc_filter_push_down.xml +++ b/tests/performance/orc_filter_push_down.xml @@ -23,4 +23,4 @@ select a % 10, length(b) % 10, count(1) from test_orc_fpd where a in (9000000, 1000) group by a % 10, length(b) % 10 - \ No newline at end 
of file + diff --git a/tests/performance/orc_tuple_field_prune.xml b/tests/performance/orc_tuple_field_prune.xml index 4e338733329..b3064f35f39 100644 --- a/tests/performance/orc_tuple_field_prune.xml +++ b/tests/performance/orc_tuple_field_prune.xml @@ -14,4 +14,4 @@ select * from file('test_orc_tfp.orc', 'ORC', 'int64_column Int64, tuple_column Tuple(c Nullable(Int64))') format Null select * from file('test_orc_tfp.orc', 'ORC', 'int64_column Int64, array_tuple_column Array(Tuple(c Nullable(Int64)))') format Null select * from file('test_orc_tfp.orc', 'ORC', 'int64_column Int64, map_tuple_column Map(String, Tuple(c Nullable(Int64)))') format Null - \ No newline at end of file + From 6ccde98943b2746d2acb3f0cafb77163f56f089d Mon Sep 17 00:00:00 2001 From: taiyang-li <654010905@qq.com> Date: Thu, 2 Nov 2023 11:01:50 +0800 Subject: [PATCH 26/99] change as request --- .../Impl/NativeORCBlockInputFormat.cpp | 40 ++++++++----------- 1 file changed, 17 insertions(+), 23 deletions(-) diff --git a/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp b/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp index 1cbb3b07c00..c53d8c92b2a 100644 --- a/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp +++ b/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp @@ -720,12 +720,8 @@ static std::string toDotColumnPath(const std::vector & columns) } } -static void getFileReaderAndSchema( - ReadBuffer & in, - std::unique_ptr & file_reader, - Block & header, - const FormatSettings & format_settings, - std::atomic & is_stopped) +static void getFileReader( + ReadBuffer & in, std::unique_ptr & file_reader, const FormatSettings & format_settings, std::atomic & is_stopped) { if (is_stopped) return; @@ -733,18 +729,6 @@ static void getFileReaderAndSchema( orc::ReaderOptions options; auto input_stream = asORCInputStream(in, format_settings, is_stopped); file_reader = orc::createReader(std::move(input_stream), options); - const auto & schema = file_reader->getType(); - - 
for (size_t i = 0; i < schema.getSubtypeCount(); ++i) - { - const std::string & name = schema.getFieldName(i); - const orc::Type * orc_type = schema.getSubtype(i); - - bool skipped = false; - DataTypePtr type = parseORCType(orc_type, format_settings.orc.skip_columns_with_unsupported_types_in_schema_inference, skipped); - if (!skipped) - header.insert(ColumnWithTypeAndName{type, name}); - } } static const orc::Type * traverseDownORCTypeByName( @@ -884,8 +868,7 @@ NativeORCBlockInputFormat::NativeORCBlockInputFormat(ReadBuffer & in_, Block hea void NativeORCBlockInputFormat::prepareFileReader() { - Block schema; - getFileReaderAndSchema(*in, file_reader, schema, format_settings, is_stopped); + getFileReader(*in, file_reader, format_settings, is_stopped); if (is_stopped) return; @@ -1037,17 +1020,28 @@ NativeORCSchemaReader::NativeORCSchemaReader(ReadBuffer & in_, const FormatSetti NamesAndTypesList NativeORCSchemaReader::readSchema() { - Block header; std::unique_ptr file_reader; std::atomic is_stopped = 0; - getFileReaderAndSchema(in, file_reader, header, format_settings, is_stopped); + getFileReader(in, file_reader, format_settings, is_stopped); + + const auto & schema = file_reader->getType(); + Block header; + for (size_t i = 0; i < schema.getSubtypeCount(); ++i) + { + const std::string & name = schema.getFieldName(i); + const orc::Type * orc_type = schema.getSubtype(i); + + bool skipped = false; + DataTypePtr type = parseORCType(orc_type, format_settings.orc.skip_columns_with_unsupported_types_in_schema_inference, skipped); + if (!skipped) + header.insert(ColumnWithTypeAndName{type, name}); + } if (format_settings.schema_inference_make_columns_nullable) return getNamesAndRecursivelyNullableTypes(header); return header.getNamesAndTypesList(); } - ORCColumnToCHColumn::ORCColumnToCHColumn( const Block & header_, bool allow_missing_columns_, bool null_as_default_, bool case_insensitive_matching_) : header(header_) From b8665a610cb7d8920a24a1e3753c785bf42f46a3 Mon 
Sep 17 00:00:00 2001 From: taiyang-li <654010905@qq.com> Date: Thu, 2 Nov 2023 15:27:48 +0800 Subject: [PATCH 27/99] fix failed perf test --- tests/performance/orc_tuple_field_prune.xml | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/tests/performance/orc_tuple_field_prune.xml b/tests/performance/orc_tuple_field_prune.xml index b3064f35f39..d95787af93b 100644 --- a/tests/performance/orc_tuple_field_prune.xml +++ b/tests/performance/orc_tuple_field_prune.xml @@ -2,16 +2,15 @@ 1 10000 - 0 - insert into function file('test_orc_tfp.orc', 'ORC') select * from generateRandom('int64_column Nullable(Int64), tuple_column Tuple(a Nullable(String), b Nullable(Float64), c Nullable(Int64)), array_tuple_column Array(Tuple(a Nullable(String), b Nullable(Float64), c Nullable(Int64))), map_tuple_column Map(String, Tuple(a Nullable(String), b Nullable(Float64), c Nullable(Int64)))') limit 1000000 + insert into function file('test_orc_tfp.orc', 'ORC') select * from generateRandom('tuple_column Tuple(a Nullable(String), b Nullable(Float64), c Nullable(Int64)), array_tuple_column Nested(a Nullable(String), b Nullable(Float64), c Nullable(Int64)), map_tuple_column Map(String, Tuple(a Nullable(String), b Nullable(Float64), c Nullable(Int64)))') limit 1000000 DROP TABLE IF EXISTS test_orc_tfp - select * from file('test_orc_tfp.orc', 'ORC', 'int64_column Int64, tuple_column Tuple(c Nullable(Int64))') format Null - select * from file('test_orc_tfp.orc', 'ORC', 'int64_column Int64, array_tuple_column Array(Tuple(c Nullable(Int64)))') format Null - select * from file('test_orc_tfp.orc', 'ORC', 'int64_column Int64, map_tuple_column Map(String, Tuple(c Nullable(Int64)))') format Null + select * from file('test_orc_tfp.orc', 'ORC', 'tuple_column Tuple(c Nullable(Int64))') format Null + select * from file('test_orc_tfp.orc', 'ORC', 'array_tuple_column Nested(c Nullable(Int64))') format Null + select * from file('test_orc_tfp.orc', 'ORC', 'map_tuple_column Map(String, 
Tuple(c Nullable(Int64)))') format Null From 1078047fb59aff24306c26952026278863833721 Mon Sep 17 00:00:00 2001 From: avogar Date: Tue, 24 Oct 2023 13:44:00 +0000 Subject: [PATCH 28/99] Fix 'Cannot read from file:' while running client in a background --- src/Client/ClientBase.cpp | 22 +++++++++++++++++-- .../02903_client_insert_in_background.sh | 13 +++++++++++ 2 files changed, 33 insertions(+), 2 deletions(-) create mode 100755 tests/queries/0_stateless/02903_client_insert_in_background.sh diff --git a/src/Client/ClientBase.cpp b/src/Client/ClientBase.cpp index 9c7bfe5974f..76f2060da93 100644 --- a/src/Client/ClientBase.cpp +++ b/src/Client/ClientBase.cpp @@ -108,6 +108,7 @@ namespace ErrorCodes extern const int FILE_ALREADY_EXISTS; extern const int USER_SESSION_LIMIT_EXCEEDED; extern const int NOT_IMPLEMENTED; + extern const int CANNOT_READ_FROM_FILE_DESCRIPTOR; } } @@ -1443,6 +1444,23 @@ void ClientBase::processInsertQuery(const String & query_to_execute, ASTPtr pars } } +namespace +{ + bool isStdinNotEmptyAndValid(ReadBufferFromFileDescriptor & std_in) + { + try + { + return !std_in.eof(); + } + catch (const Exception & e) + { + if (e.code() == ErrorCodes::CANNOT_READ_FROM_FILE_DESCRIPTOR) + return false; + throw; + } + } +} + void ClientBase::sendData(Block & sample, const ColumnsDescription & columns_description, ASTPtr parsed_query) { @@ -1460,7 +1478,7 @@ void ClientBase::sendData(Block & sample, const ColumnsDescription & columns_des if (!parsed_insert_query) return; - bool have_data_in_stdin = !is_interactive && !stdin_is_a_tty && !std_in.eof(); + bool have_data_in_stdin = !is_interactive && !stdin_is_a_tty && isStdinNotEmptyAndValid(std_in); if (need_render_progress) { @@ -1851,7 +1869,7 @@ void ClientBase::processParsedSingleQuery(const String & full_query, const Strin if (is_async_insert_with_inlined_data) { - bool have_data_in_stdin = !is_interactive && !stdin_is_a_tty && !std_in.eof(); + bool have_data_in_stdin = !is_interactive && !stdin_is_a_tty && 
isStdinNotEmptyAndValid(std_in); bool have_external_data = have_data_in_stdin || insert->infile; if (have_external_data) diff --git a/tests/queries/0_stateless/02903_client_insert_in_background.sh b/tests/queries/0_stateless/02903_client_insert_in_background.sh new file mode 100755 index 00000000000..d5fc56752f6 --- /dev/null +++ b/tests/queries/0_stateless/02903_client_insert_in_background.sh @@ -0,0 +1,13 @@ +#!/usr/bin/env bash + +CUR_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd) +# shellcheck source=../shell_config.sh +. "$CUR_DIR"/../shell_config.sh + +$CLICKHOUSE_CLIENT -q "drop table if exists test" +$CLICKHOUSE_CLIENT -q "create table test (x UInt64) engine=Memory" +nohup $CLICKHOUSE_CLIENT -q "insert into test values (42)" 2> $CLICKHOUSE_TEST_UNIQUE_NAME.out +tail -n +2 $CLICKHOUSE_TEST_UNIQUE_NAME.out +$CLICKHOUSE_CLIENT -q "drop table test" +rm $CLICKHOUSE_TEST_UNIQUE_NAME.out + From b306fdb11d01d3fd1d4c38d5a8facafb77052ecf Mon Sep 17 00:00:00 2001 From: avogar Date: Mon, 30 Oct 2023 10:25:58 +0000 Subject: [PATCH 29/99] Add refernce file --- .../0_stateless/02903_client_insert_in_background.reference | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 tests/queries/0_stateless/02903_client_insert_in_background.reference diff --git a/tests/queries/0_stateless/02903_client_insert_in_background.reference b/tests/queries/0_stateless/02903_client_insert_in_background.reference new file mode 100644 index 00000000000..e69de29bb2d From 7c69dee5cd535ae64062207283b41124270ffee1 Mon Sep 17 00:00:00 2001 From: avogar Date: Thu, 2 Nov 2023 18:05:02 +0000 Subject: [PATCH 30/99] Fix review comment --- src/Client/ClientBase.cpp | 36 ++++++++++++++++++------------------ 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/src/Client/ClientBase.cpp b/src/Client/ClientBase.cpp index 76f2060da93..d3cb828e8f7 100644 --- a/src/Client/ClientBase.cpp +++ b/src/Client/ClientBase.cpp @@ -1385,6 +1385,23 @@ void 
ClientBase::addMultiquery(std::string_view query, Arguments & common_argume common_arguments.emplace_back(query); } +namespace +{ +bool isStdinNotEmptyAndValid(ReadBufferFromFileDescriptor & std_in) +{ + try + { + return !std_in.eof(); + } + catch (const Exception & e) + { + if (e.code() == ErrorCodes::CANNOT_READ_FROM_FILE_DESCRIPTOR) + return false; + throw; + } +} +} + void ClientBase::processInsertQuery(const String & query_to_execute, ASTPtr parsed_query) { @@ -1404,7 +1421,7 @@ void ClientBase::processInsertQuery(const String & query_to_execute, ASTPtr pars /// Process the query that requires transferring data blocks to the server. const auto & parsed_insert_query = parsed_query->as(); - if ((!parsed_insert_query.data && !parsed_insert_query.infile) && (is_interactive || (!stdin_is_a_tty && std_in.eof()))) + if ((!parsed_insert_query.data && !parsed_insert_query.infile) && (is_interactive || (!stdin_is_a_tty && !isStdinNotEmptyAndValid(std_in)))) { const auto & settings = global_context->getSettingsRef(); if (settings.throw_if_no_data_to_insert) @@ -1444,23 +1461,6 @@ void ClientBase::processInsertQuery(const String & query_to_execute, ASTPtr pars } } -namespace -{ - bool isStdinNotEmptyAndValid(ReadBufferFromFileDescriptor & std_in) - { - try - { - return !std_in.eof(); - } - catch (const Exception & e) - { - if (e.code() == ErrorCodes::CANNOT_READ_FROM_FILE_DESCRIPTOR) - return false; - throw; - } - } -} - void ClientBase::sendData(Block & sample, const ColumnsDescription & columns_description, ASTPtr parsed_query) { From 4b7146d47ed2894068ba6972bdf077df1625e525 Mon Sep 17 00:00:00 2001 From: taiyang-li <654010905@qq.com> Date: Fri, 3 Nov 2023 16:03:26 +0800 Subject: [PATCH 31/99] remove useless codes --- .../Impl/NativeORCBlockInputFormat.cpp | 63 ------------------- 1 file changed, 63 deletions(-) diff --git a/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp b/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp index 
c53d8c92b2a..49379405c26 100644 --- a/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp +++ b/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp @@ -678,48 +678,6 @@ buildORCSearchArgument(const KeyCondition & key_condition, const Block & header, return builder->build(); } - -static std::string toDotColumnPath(const std::vector & columns) -{ - if (columns.empty()) - return {}; - - std::ostringstream column_stream; // STYLE_CHECK_ALLOW_STD_STRING_STREAM - std::copy(columns.begin(), columns.end(), std::ostream_iterator(column_stream, ".")); - std::string column_path = column_stream.str(); - return column_path.substr(0, column_path.length() - 1); -} - -[[maybe_unused]] static void buildORCTypeNameIdMap( - const orc::Type * orc_type, - std::vector & columns, - bool ignore_case, - std::map & id_type_map, - std::map & name_id_map) -{ - id_type_map[orc_type->getColumnId()] = orc_type; - if (orc::STRUCT == orc_type->getKind()) - { - for (size_t i = 0; i < orc_type->getSubtypeCount(); ++i) - { - const std::string & field_name = orc_type->getFieldName(i); - columns.push_back(field_name); - auto column_path = toDotColumnPath(columns); - if (ignore_case) - boost::to_lower(column_path); - name_id_map[column_path] = orc_type->getSubtype(i)->getColumnId(); - buildORCTypeNameIdMap(orc_type->getSubtype(i), columns, ignore_case, id_type_map, name_id_map); - columns.pop_back(); - } - } - else - { - // other non-primitive type - for (size_t j = 0; j < orc_type->getSubtypeCount(); ++j) - buildORCTypeNameIdMap(orc_type->getSubtype(j), columns, ignore_case, id_type_map, name_id_map); - } -} - static void getFileReader( ReadBuffer & in, std::unique_ptr & file_reader, const FormatSettings & format_settings, std::atomic & is_stopped) { @@ -737,7 +695,6 @@ static const orc::Type * traverseDownORCTypeByName( DataTypePtr & type, bool ignore_case) { - // std::cout << "target:" << target << ", orc_type:" << orc_type->toString() << ", type:" << type->getName() << std::endl; if 
(target.empty()) return orc_type; @@ -792,8 +749,6 @@ static const orc::Type * traverseDownORCTypeByName( static void updateIncludeTypeIds( DataTypePtr type, const orc::Type * orc_type, bool ignore_case, std::unordered_set & include_typeids) { - // std::cout << "ch type:" << type->getName() << ", orc_type:" << orc_type->toString() << std::endl; - /// For primitive types, directly append column id into result if (orc_type->getSubtypeCount() == 0) { @@ -894,24 +849,6 @@ void NativeORCBlockInputFormat::prepareFileReader() } include_indices.assign(include_typeids.begin(), include_typeids.end()); - /// Just for Debug - // std::vector tmp; - // std::map id_type_map; - // std::map name_id_map; - // buildORCTypeNameIdMap(&file_schema, tmp, ignore_case, id_type_map, name_id_map); - // std::cout << "just for debug:" << std::endl; - // std::cout << "subtypes:" << file_reader->getType().getSubtypeCount() << std::endl; - // std::cout << "ch output type:" << getPort().getHeader().dumpStructure() << std::endl; - // std::cout << "orc output type:" << file_reader->getType().toString() << std::endl; - // std::cout << "id type map" << std::endl; - // for (const auto & [k, v] : id_type_map) - // std::cout << "id:" << k << ", type:" << v->toString() << std::endl; - // std::cout << "name id map" << std::endl; - // for (const auto & [k, v] : name_id_map) - // std::cout << "name:" << k << ", id:" << v << std::endl; - // for (const auto & x : include_indices) - // std::cout << "choose " << x << std::endl; - if (format_settings.orc.filter_push_down && key_condition && !sarg) { sarg = buildORCSearchArgument(*key_condition, getPort().getHeader(), file_reader->getType(), format_settings); From 15cb8d1b8975464f7f440e116fb99a48ca0c4865 Mon Sep 17 00:00:00 2001 From: avogar Date: Fri, 3 Nov 2023 13:16:08 +0000 Subject: [PATCH 32/99] Fix use-of-unitialized-value --- contrib/arrow | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/contrib/arrow b/contrib/arrow index 
8cdbf43f78a..ba5c67934e8 160000 --- a/contrib/arrow +++ b/contrib/arrow @@ -1 +1 @@ -Subproject commit 8cdbf43f78ad02615aef29dc7f9af0dea22a03e4 +Subproject commit ba5c67934e8274d649befcffab56731632dc5253 From 8a6ae6e150271d2a421f7fd3aa95f232d447b582 Mon Sep 17 00:00:00 2001 From: taiyang-li <654010905@qq.com> Date: Sat, 4 Nov 2023 09:41:01 +0800 Subject: [PATCH 33/99] change as request --- .../Impl/NativeORCBlockInputFormat.cpp | 37 ++++++++++++------- 1 file changed, 23 insertions(+), 14 deletions(-) diff --git a/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp b/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp index 49379405c26..60d43cc049a 100644 --- a/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp +++ b/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp @@ -783,22 +783,31 @@ static void updateIncludeTypeIds( { if (tuple_type->haveExplicitNames()) { - const auto & names = tuple_type->getElementNames(); - for (size_t tuple_i = 0; tuple_i < names.size(); ++tuple_i) + std::unordered_map orc_field_name_to_index; + orc_field_name_to_index.reserve(orc_type->getSubtypeCount()); + for (size_t struct_i = 0; struct_i < orc_type->getSubtypeCount(); ++struct_i) { - const auto & name = names[tuple_i]; - for (size_t struct_i = 0; struct_i < orc_type->getSubtypeCount(); ++struct_i) + String field_name = orc_type->getFieldName(struct_i); + if (ignore_case) + boost::to_lower(field_name); + + orc_field_name_to_index[field_name] = struct_i; + } + + const auto & element_names = tuple_type->getElementNames(); + for (size_t tuple_i = 0; tuple_i < element_names.size(); ++tuple_i) + { + String element_name = element_names[tuple_i]; + if (ignore_case) + boost::to_lower(element_name); + + if (orc_field_name_to_index.contains(element_name)) { - if (boost::equals(orc_type->getFieldName(struct_i), name) - || (ignore_case && boost::iequals(orc_type->getFieldName(struct_i), name))) - { - updateIncludeTypeIds( - tuple_type->getElement(tuple_i), - 
orc_type->getSubtype(struct_i), - ignore_case, - include_typeids); - break; - } + updateIncludeTypeIds( + tuple_type->getElement(tuple_i), + orc_type->getSubtype(orc_field_name_to_index[element_name]), + ignore_case, + include_typeids); } } } From 8e23dd909024ba8109b41b8d54d6e355f11ea0ff Mon Sep 17 00:00:00 2001 From: taiyang-li <654010905@qq.com> Date: Sat, 4 Nov 2023 10:08:26 +0800 Subject: [PATCH 34/99] add some comments --- src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp b/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp index 60d43cc049a..3be4b20524f 100644 --- a/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp +++ b/src/Processors/Formats/Impl/NativeORCBlockInputFormat.cpp @@ -778,6 +778,9 @@ static void updateIncludeTypeIds( return; } case orc::STRUCT: { + /// To make sure tuple field pruning work fine, we should include only the fields of orc struct type which are also contained in CH tuple types, instead of all fields of orc struct type. + /// For example, CH tupe type in header is "x Tuple(a String)", ORC struct type is "x struct", then only type id of field "x.a" should be included. + /// For tuple field pruning purpose, we should never include "x.b" for it is not required in format header. 
const auto * tuple_type = typeid_cast(non_nullable_type.get()); if (tuple_type) { From 399d61b222b2a9b9a082569a2dc29c64c8a14c87 Mon Sep 17 00:00:00 2001 From: flynn Date: Wed, 8 Nov 2023 11:34:49 +0000 Subject: [PATCH 35/99] Fix transfer query to MySQL compatible query --- .../transformQueryForExternalDatabase.cpp | 31 +++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/src/Storages/transformQueryForExternalDatabase.cpp b/src/Storages/transformQueryForExternalDatabase.cpp index 84a696a1e9c..7999584ce0e 100644 --- a/src/Storages/transformQueryForExternalDatabase.cpp +++ b/src/Storages/transformQueryForExternalDatabase.cpp @@ -75,6 +75,33 @@ public: } }; +struct ReplaceLiteralToExprVisitorData +{ + using TypeToVisit = ASTFunction; + + void visit(ASTFunction & func, ASTPtr &) const + { + if (func.name == "and" || func.name == "or") + { + for (auto & argument : func.arguments->children) + { + auto * literal_expr = typeid_cast(argument.get()); + UInt64 value; + if (literal_expr && literal_expr->value.tryGet(value) && (value == 0 || value == 1)) + { + /// 1 -> 1=1, 0 -> 1=0. 
+ if (value) + argument = makeASTFunction("equals", std::make_shared(1), std::make_shared(1)); + else + argument = makeASTFunction("equals", std::make_shared(1), std::make_shared(0)); + } + } + } + } +}; + +using ReplaceLiteralToExprVisitor = InDepthNodeVisitor, true>; + class DropAliasesMatcher { public: @@ -288,6 +315,10 @@ String transformQueryForExternalDatabaseImpl( { replaceConstantExpressions(original_where, context, available_columns); + /// Replace like WHERE 1 AND 1 to WHRE 1 = 1 AND 1 = 1 + ReplaceLiteralToExprVisitor::Data replace_literal_to_expr_data; + ReplaceLiteralToExprVisitor(replace_literal_to_expr_data).visit(original_where); + if (isCompatible(original_where)) { select->setExpression(ASTSelectQuery::Expression::WHERE, std::move(original_where)); From d0ba561faacdd97f22f8486b92cc2b29e7236677 Mon Sep 17 00:00:00 2001 From: flynn Date: Wed, 8 Nov 2023 23:20:12 +0800 Subject: [PATCH 36/99] Update src/Storages/transformQueryForExternalDatabase.cpp Co-authored-by: Sema Checherinda <104093494+CheSema@users.noreply.github.com> --- src/Storages/transformQueryForExternalDatabase.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Storages/transformQueryForExternalDatabase.cpp b/src/Storages/transformQueryForExternalDatabase.cpp index 7999584ce0e..fd3c47e0e3f 100644 --- a/src/Storages/transformQueryForExternalDatabase.cpp +++ b/src/Storages/transformQueryForExternalDatabase.cpp @@ -315,7 +315,7 @@ String transformQueryForExternalDatabaseImpl( { replaceConstantExpressions(original_where, context, available_columns); - /// Replace like WHERE 1 AND 1 to WHRE 1 = 1 AND 1 = 1 + /// Replace like WHERE 1 AND 1 to WHERE 1 = 1 AND 1 = 1 ReplaceLiteralToExprVisitor::Data replace_literal_to_expr_data; ReplaceLiteralToExprVisitor(replace_literal_to_expr_data).visit(original_where); From ca18674f1af01bf6463fe683fb8dcb4835b08dd2 Mon Sep 17 00:00:00 2001 From: flynn Date: Wed, 8 Nov 2023 16:21:16 +0000 Subject: [PATCH 37/99] update test --- 
.../gtest_transform_query_for_external_database.cpp | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/src/Storages/tests/gtest_transform_query_for_external_database.cpp b/src/Storages/tests/gtest_transform_query_for_external_database.cpp index 749a154c19d..c40c9b0f5bc 100644 --- a/src/Storages/tests/gtest_transform_query_for_external_database.cpp +++ b/src/Storages/tests/gtest_transform_query_for_external_database.cpp @@ -279,9 +279,13 @@ TEST(TransformQueryForExternalDatabase, MultipleAndSubqueries) { const State & state = State::instance(); - check(state, 1, {"column"}, - "SELECT column FROM test.table WHERE 1 = 1 AND toString(column) = '42' AND column = 42 AND left(toString(column), 10) = RIGHT(toString(column), 10) AND column IN (1, 42) AND SUBSTRING(toString(column) FROM 1 FOR 2) = 'Hello' AND column != 4", - R"(SELECT "column" FROM "test"."table" WHERE 1 AND ("column" = 42) AND ("column" IN (1, 42)) AND ("column" != 4))"); + check( + state, + 1, + {"column"}, + "SELECT column FROM test.table WHERE 1 = 1 AND toString(column) = '42' AND column = 42 AND left(toString(column), 10) = " + "RIGHT(toString(column), 10) AND column IN (1, 42) AND SUBSTRING(toString(column) FROM 1 FOR 2) = 'Hello' AND column != 4", + R"(SELECT "column" FROM "test"."table" WHERE 1 = 1 AND ("column" = 42) AND ("column" IN (1, 42)) AND ("column" != 4))"); check(state, 1, {"column"}, "SELECT column FROM test.table WHERE toString(column) = '42' AND left(toString(column), 10) = RIGHT(toString(column), 10) AND column = 42", R"(SELECT "column" FROM "test"."table" WHERE "column" = 42)"); From 86685685d36c4a07c631b84589fcd34004a3877f Mon Sep 17 00:00:00 2001 From: Nikolay Degterinsky Date: Tue, 7 Nov 2023 00:16:38 +0000 Subject: [PATCH 38/99] Fix segfault during Kerberos initialization --- src/Access/KerberosInit.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Access/KerberosInit.cpp b/src/Access/KerberosInit.cpp index 58e4a46f2aa..772938ad9b2 
100644 --- a/src/Access/KerberosInit.cpp +++ b/src/Access/KerberosInit.cpp @@ -44,7 +44,7 @@ private: krb5_ccache defcache = nullptr; krb5_get_init_creds_opt * options = nullptr; // Credentials structure including ticket, session key, and lifetime info. - krb5_creds my_creds; + krb5_creds my_creds {}; krb5_keytab keytab = nullptr; krb5_principal defcache_princ = nullptr; String fmtError(krb5_error_code code) const; From 44eb73980f1c4bb42e57915d633794715d55e3c4 Mon Sep 17 00:00:00 2001 From: Nikolay Degterinsky Date: Thu, 9 Nov 2023 04:31:49 +0000 Subject: [PATCH 39/99] Fix startup failure due to TTL dependency --- src/Databases/DDLLoadingDependencyVisitor.cpp | 9 +++++++ src/Databases/DDLLoadingDependencyVisitor.h | 1 + .../02908_table_ttl_dependency.reference | 0 .../0_stateless/02908_table_ttl_dependency.sh | 24 +++++++++++++++++++ 4 files changed, 34 insertions(+) create mode 100644 tests/queries/0_stateless/02908_table_ttl_dependency.reference create mode 100755 tests/queries/0_stateless/02908_table_ttl_dependency.sh diff --git a/src/Databases/DDLLoadingDependencyVisitor.cpp b/src/Databases/DDLLoadingDependencyVisitor.cpp index 99538fd801e..fc362dd8578 100644 --- a/src/Databases/DDLLoadingDependencyVisitor.cpp +++ b/src/Databases/DDLLoadingDependencyVisitor.cpp @@ -1,4 +1,5 @@ #include +#include #include #include #include @@ -7,6 +8,7 @@ #include #include #include +#include #include @@ -22,6 +24,7 @@ TableNamesSet getLoadingDependenciesFromCreateQuery(ContextPtr global_context, c data.default_database = global_context->getCurrentDatabase(); data.create_query = ast; data.global_context = global_context; + data.table_name = table; TableLoadingDependenciesVisitor visitor{data}; visitor.visit(ast); data.dependencies.erase(table); @@ -113,6 +116,12 @@ void DDLLoadingDependencyVisitor::visit(const ASTFunctionWithKeyValueArguments & void DDLLoadingDependencyVisitor::visit(const ASTStorage & storage, Data & data) { + if (storage.ttl_table) + { + auto ttl_dependensies = 
getDependenciesFromCreateQuery(data.global_context, data.table_name, storage.ttl_table->ptr()); + data.dependencies.merge(ttl_dependensies); + } + if (!storage.engine) return; diff --git a/src/Databases/DDLLoadingDependencyVisitor.h b/src/Databases/DDLLoadingDependencyVisitor.h index f173517f852..a9e9f4d7a53 100644 --- a/src/Databases/DDLLoadingDependencyVisitor.h +++ b/src/Databases/DDLLoadingDependencyVisitor.h @@ -38,6 +38,7 @@ public: TableNamesSet dependencies; ContextPtr global_context; ASTPtr create_query; + QualifiedTableName table_name; }; using Visitor = ConstInDepthNodeVisitor; diff --git a/tests/queries/0_stateless/02908_table_ttl_dependency.reference b/tests/queries/0_stateless/02908_table_ttl_dependency.reference new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/queries/0_stateless/02908_table_ttl_dependency.sh b/tests/queries/0_stateless/02908_table_ttl_dependency.sh new file mode 100755 index 00000000000..70136b4a42b --- /dev/null +++ b/tests/queries/0_stateless/02908_table_ttl_dependency.sh @@ -0,0 +1,24 @@ +#!/usr/bin/env bash +# Tags: no-ordinary-database +# Tag no-ordinary-database: requires UUID + +CUR_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd) +# shellcheck source=../shell_config.sh +. 
"$CUR_DIR"/../shell_config.sh + +$CLICKHOUSE_CLIENT -nm -q " + DROP TABLE IF EXISTS 02908_dependent; + DROP TABLE IF EXISTS 02908_main; + + CREATE TABLE 02908_main (a UInt32) ENGINE = MergeTree ORDER BY a; + CREATE TABLE 02908_dependent (a UInt32, ts DateTime) ENGINE = MergeTree ORDER BY a TTL ts + 1 WHERE a IN (SELECT a FROM ${CLICKHOUSE_DATABASE}.02908_main); +" + +$CLICKHOUSE_CLIENT -nm -q " + DROP TABLE 02908_main; +" 2>&1 | grep -F -q "HAVE_DEPENDENT_OBJECTS" + +$CLICKHOUSE_CLIENT -nm -q " + DROP TABLE 02908_dependent; + DROP TABLE 02908_main; +" From 41cdd5dd1a3e520d85f36be71c8fd69967ffee57 Mon Sep 17 00:00:00 2001 From: flynn Date: Thu, 9 Nov 2023 06:23:23 +0000 Subject: [PATCH 40/99] Fix --- .../tests/gtest_transform_query_for_external_database.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Storages/tests/gtest_transform_query_for_external_database.cpp b/src/Storages/tests/gtest_transform_query_for_external_database.cpp index c40c9b0f5bc..1b2a4187c94 100644 --- a/src/Storages/tests/gtest_transform_query_for_external_database.cpp +++ b/src/Storages/tests/gtest_transform_query_for_external_database.cpp @@ -285,7 +285,7 @@ TEST(TransformQueryForExternalDatabase, MultipleAndSubqueries) {"column"}, "SELECT column FROM test.table WHERE 1 = 1 AND toString(column) = '42' AND column = 42 AND left(toString(column), 10) = " "RIGHT(toString(column), 10) AND column IN (1, 42) AND SUBSTRING(toString(column) FROM 1 FOR 2) = 'Hello' AND column != 4", - R"(SELECT "column" FROM "test"."table" WHERE 1 = 1 AND ("column" = 42) AND ("column" IN (1, 42)) AND ("column" != 4))"); + R"(SELECT "column" FROM "test"."table" WHERE (1 = 1) AND ("column" = 42) AND ("column" IN (1, 42)) AND ("column" != 4))"); check(state, 1, {"column"}, "SELECT column FROM test.table WHERE toString(column) = '42' AND left(toString(column), 10) = RIGHT(toString(column), 10) AND column = 42", R"(SELECT "column" FROM "test"."table" WHERE "column" = 42)"); From 
4ac3dcc417bb629c9731aa7447d030aabf56b5a3 Mon Sep 17 00:00:00 2001 From: Nikolay Degterinsky Date: Thu, 9 Nov 2023 07:07:41 +0000 Subject: [PATCH 41/99] Fix ALTER COLUMN with ALIAS --- src/Storages/AlterCommands.cpp | 2 +- .../0_stateless/02908_alter_column_alias.reference | 1 + tests/queries/0_stateless/02908_alter_column_alias.sql | 8 ++++++++ 3 files changed, 10 insertions(+), 1 deletion(-) create mode 100644 tests/queries/0_stateless/02908_alter_column_alias.reference create mode 100644 tests/queries/0_stateless/02908_alter_column_alias.sql diff --git a/src/Storages/AlterCommands.cpp b/src/Storages/AlterCommands.cpp index c6fa17583b5..3d6f4b864a8 100644 --- a/src/Storages/AlterCommands.cpp +++ b/src/Storages/AlterCommands.cpp @@ -1148,7 +1148,7 @@ void AlterCommands::validate(const StoragePtr & table, ContextPtr context) const /// The change of data type to/from Object is broken, so disable it for now if (command.data_type) { - const GetColumnsOptions options(GetColumnsOptions::AllPhysical); + const GetColumnsOptions options(GetColumnsOptions::All); const auto old_data_type = all_columns.getColumn(options, column_name).type; if (command.data_type->getName().contains("Object") diff --git a/tests/queries/0_stateless/02908_alter_column_alias.reference b/tests/queries/0_stateless/02908_alter_column_alias.reference new file mode 100644 index 00000000000..e44df6e9ff6 --- /dev/null +++ b/tests/queries/0_stateless/02908_alter_column_alias.reference @@ -0,0 +1 @@ +CREATE TABLE default.t\n(\n `c0` DateTime,\n `c1` DateTime,\n `a` DateTime ALIAS c1\n)\nENGINE = MergeTree\nORDER BY tuple()\nSETTINGS index_granularity = 8192 diff --git a/tests/queries/0_stateless/02908_alter_column_alias.sql b/tests/queries/0_stateless/02908_alter_column_alias.sql new file mode 100644 index 00000000000..fd98339e8b5 --- /dev/null +++ b/tests/queries/0_stateless/02908_alter_column_alias.sql @@ -0,0 +1,8 @@ +CREATE TABLE t ( + c0 DateTime, + c1 DateTime, + a DateTime alias 
toStartOfFifteenMinutes(c0) +) ENGINE = MergeTree() ORDER BY tuple(); + +ALTER TABLE t MODIFY COLUMN a DateTime ALIAS c1; +SHOW CREATE t; From 10ac68517e4303e3bf25697d45ff37f1d3c4eee7 Mon Sep 17 00:00:00 2001 From: Nikolay Degterinsky Date: Thu, 9 Nov 2023 07:59:50 +0000 Subject: [PATCH 42/99] Fix empty NAMED COLLECTIONs --- src/Common/NamedCollections/NamedCollectionUtils.cpp | 6 ++++++ src/Parsers/ASTAlterNamedCollectionQuery.cpp | 2 +- .../0_stateless/02908_empty_named_collection.reference | 0 tests/queries/0_stateless/02908_empty_named_collection.sql | 5 +++++ 4 files changed, 12 insertions(+), 1 deletion(-) create mode 100644 tests/queries/0_stateless/02908_empty_named_collection.reference create mode 100644 tests/queries/0_stateless/02908_empty_named_collection.sql diff --git a/src/Common/NamedCollections/NamedCollectionUtils.cpp b/src/Common/NamedCollections/NamedCollectionUtils.cpp index cab844d6213..c535c851464 100644 --- a/src/Common/NamedCollections/NamedCollectionUtils.cpp +++ b/src/Common/NamedCollections/NamedCollectionUtils.cpp @@ -217,6 +217,12 @@ public: for (const auto & [name, value] : result_changes_map) create_query.changes.emplace_back(name, value); + if (create_query.changes.empty()) + throw Exception( + ErrorCodes::BAD_ARGUMENTS, + "Named collection cannot be empty (collection name: {})", + query.collection_name); + writeCreateQueryToMetadata( create_query, getMetadataPath(query.collection_name), diff --git a/src/Parsers/ASTAlterNamedCollectionQuery.cpp b/src/Parsers/ASTAlterNamedCollectionQuery.cpp index 6363a7306bd..00f073b320a 100644 --- a/src/Parsers/ASTAlterNamedCollectionQuery.cpp +++ b/src/Parsers/ASTAlterNamedCollectionQuery.cpp @@ -14,7 +14,7 @@ ASTPtr ASTAlterNamedCollectionQuery::clone() const void ASTAlterNamedCollectionQuery::formatImpl(const IAST::FormatSettings & settings, IAST::FormatState &, IAST::FormatStateStacked) const { - settings.ostr << (settings.hilite ? 
hilite_keyword : "") << "Alter NAMED COLLECTION "; + settings.ostr << (settings.hilite ? hilite_keyword : "") << "ALTER NAMED COLLECTION "; if (if_exists) settings.ostr << "IF EXISTS "; settings.ostr << (settings.hilite ? hilite_identifier : "") << backQuoteIfNeed(collection_name) << (settings.hilite ? hilite_none : ""); diff --git a/tests/queries/0_stateless/02908_empty_named_collection.reference b/tests/queries/0_stateless/02908_empty_named_collection.reference new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/queries/0_stateless/02908_empty_named_collection.sql b/tests/queries/0_stateless/02908_empty_named_collection.sql new file mode 100644 index 00000000000..6aab83858e8 --- /dev/null +++ b/tests/queries/0_stateless/02908_empty_named_collection.sql @@ -0,0 +1,5 @@ +-- Tags: no-parallel + +CREATE NAMED COLLECTION foobar03 AS a = 1; +ALTER NAMED COLLECTION foobar03 DELETE b; -- { serverError BAD_ARGUMENTS } +DROP NAMED COLLECTION foobar03; From 96f73139b63d38a92e9e35db49b1c0158a1f164f Mon Sep 17 00:00:00 2001 From: Alexey Milovidov Date: Fri, 10 Nov 2023 06:13:55 +0100 Subject: [PATCH 43/99] Check for large translation units --- CMakeLists.txt | 7 +++++++ programs/CMakeLists.txt | 5 +++++ utils/check-style/check-large-objects.sh | 10 ++++++++++ 3 files changed, 22 insertions(+) create mode 100755 utils/check-style/check-large-objects.sh diff --git a/CMakeLists.txt b/CMakeLists.txt index 0d1ef22b2aa..ef97c13fa1c 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -622,3 +622,10 @@ if (NATIVE_BUILD_TARGETS COMMAND ${CMAKE_COMMAND} --build "${NATIVE_BUILD_DIR}" --target ${NATIVE_BUILD_TARGETS} COMMAND_ECHO STDOUT) endif () + +if (CMAKE_BUILD_TYPE_UC STREQUAL "RELWITHDEBINFO") + set(CHECK_LARGE_OBJECT_SIZES_DEFAULT ON) +else () + set(CHECK_LARGE_OBJECT_SIZES_DEFAULT OFF) +endif () +option(CHECK_LARGE_OBJECT_SIZES "Check that there are no large object files after build." 
${CHECK_LARGE_OBJECT_SIZES_DEFAULT}) diff --git a/programs/CMakeLists.txt b/programs/CMakeLists.txt index eb4a898d472..8496452e6ea 100644 --- a/programs/CMakeLists.txt +++ b/programs/CMakeLists.txt @@ -432,6 +432,11 @@ if (USE_BINARY_HASH) add_custom_command(TARGET clickhouse POST_BUILD COMMAND ./clickhouse hash-binary > hash && ${OBJCOPY_PATH} --add-section .clickhouse.hash=hash clickhouse COMMENT "Adding section '.clickhouse.hash' to clickhouse binary" VERBATIM) endif() +if (CHECK_LARGE_OBJECT_SIZES) + add_custom_command(TARGET clickhouse POST_BUILD + COMMAND "${CMAKE_SOURCE_DIR}/utils/check-style/check-large-objects.sh" "${CMAKE_BINARY_DIR}") +endif () + if (SPLIT_DEBUG_SYMBOLS) clickhouse_split_debug_symbols(TARGET clickhouse DESTINATION_DIR ${CMAKE_CURRENT_BINARY_DIR}/${SPLITTED_DEBUG_SYMBOLS_DIR} BINARY_PATH clickhouse) else() diff --git a/utils/check-style/check-large-objects.sh b/utils/check-style/check-large-objects.sh new file mode 100755 index 00000000000..c598ff0e99c --- /dev/null +++ b/utils/check-style/check-large-objects.sh @@ -0,0 +1,10 @@ +#!/usr/bin/env bash + +# Check that there are no new translation units compiled to an object file larger than a certain size. + +if find $1 -name '*.o' | xargs wc -c | grep -v total | sort -rn | awk '{ if ($1 > 50000000) print }' \ + | grep -v -P 'CastOverloadResolver|AggregateFunctionMax|AggregateFunctionMin|RangeHashedDictionary|Aggregator|AggregateFunctionUniq' +then + echo "^ It's not allowed to have so large translation units." 
+ exit 1 +fi From 0d01438c48282051f8978e5fe30b34db9f2d10fc Mon Sep 17 00:00:00 2001 From: Alexey Milovidov Date: Sat, 11 Nov 2023 00:44:46 +0100 Subject: [PATCH 44/99] Fix error --- CMakeLists.txt | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index ef97c13fa1c..9c8952aea96 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -554,6 +554,13 @@ if (ENABLE_RUST) endif() endif() +if (CMAKE_BUILD_TYPE_UC STREQUAL "RELWITHDEBINFO") + set(CHECK_LARGE_OBJECT_SIZES_DEFAULT ON) +else () + set(CHECK_LARGE_OBJECT_SIZES_DEFAULT OFF) +endif () +option(CHECK_LARGE_OBJECT_SIZES "Check that there are no large object files after build." ${CHECK_LARGE_OBJECT_SIZES_DEFAULT}) + add_subdirectory (base) add_subdirectory (src) add_subdirectory (programs) @@ -622,10 +629,3 @@ if (NATIVE_BUILD_TARGETS COMMAND ${CMAKE_COMMAND} --build "${NATIVE_BUILD_DIR}" --target ${NATIVE_BUILD_TARGETS} COMMAND_ECHO STDOUT) endif () - -if (CMAKE_BUILD_TYPE_UC STREQUAL "RELWITHDEBINFO") - set(CHECK_LARGE_OBJECT_SIZES_DEFAULT ON) -else () - set(CHECK_LARGE_OBJECT_SIZES_DEFAULT OFF) -endif () -option(CHECK_LARGE_OBJECT_SIZES "Check that there are no large object files after build." 
${CHECK_LARGE_OBJECT_SIZES_DEFAULT}) From 52c825db449800fcdaeb541d3866c9e5bc7f5fe8 Mon Sep 17 00:00:00 2001 From: Robert Schulze Date: Thu, 9 Nov 2023 14:00:02 +0000 Subject: [PATCH 45/99] Query cache: Allow to ignore non-deterministic queries Fixes: #56504 --- docs/en/operations/query-cache.md | 2 +- docs/en/operations/settings/settings.md | 11 ++-- src/Core/Settings.h | 3 +- src/Core/SettingsEnums.cpp | 6 +++ src/Core/SettingsEnums.h | 10 ++++ src/Interpreters/executeQuery.cpp | 51 ++++++++++--------- ...cache_nondeterministic_functions.reference | 4 ++ ...query_cache_nondeterministic_functions.sql | 26 +++++++--- .../02888_obsolete_settings.reference | 1 + 9 files changed, 78 insertions(+), 36 deletions(-) diff --git a/docs/en/operations/query-cache.md b/docs/en/operations/query-cache.md index 665ae6cdfdc..e1f43484082 100644 --- a/docs/en/operations/query-cache.md +++ b/docs/en/operations/query-cache.md @@ -169,7 +169,7 @@ Also, results of queries with non-deterministic functions are not cached by defa [`getMacro()`](../sql-reference/functions/other-functions.md#getMacro) etc. To force caching of results of queries with non-deterministic functions regardless, use setting -[query_cache_store_results_of_queries_with_nondeterministic_functions](settings/settings.md#query-cache-store-results-of-queries-with-nondeterministic-functions). +[query_cache_nondeterministic_function_handling](settings/settings.md#query-cache-nondeterministic-function-handling). Finally, entries in the query cache are not shared between users due to security reasons. For example, user A must not be able to bypass a row policy on a table by running the same query as another user B for whom no such policy exists. 
However, if necessary, cache entries can diff --git a/docs/en/operations/settings/settings.md b/docs/en/operations/settings/settings.md index 2f3805e8e55..4cae272880a 100644 --- a/docs/en/operations/settings/settings.md +++ b/docs/en/operations/settings/settings.md @@ -1657,16 +1657,17 @@ Possible values: Default value: `1`. -## query_cache_store_results_of_queries_with_nondeterministic_functions {#query-cache-store-results-of-queries-with-nondeterministic-functions} +## query_cache_non_deterministic_function_handling {#query-cache-nondeterministic-function-handling} -If turned on, then results of `SELECT` queries with non-deterministic functions (e.g. `rand()`, `now()`) can be cached in the [query cache](../query-cache.md). +Controls how the [query cache](../query-cache.md) handles `SELECT` queries with non-deterministic functions like `rand()` or `now()`. Possible values: -- 0 - Disabled -- 1 - Enabled +- `'throw'` - Throw an exception. +- `'save'` - Cache the query result even if it is non-deterministic. +- `'ignore'` - Don't cache the query result (but also don't throw an exception). -Default value: `0`. +Default value: `throw`. ## query_cache_min_query_runs {#query-cache-min-query-runs} diff --git a/src/Core/Settings.h b/src/Core/Settings.h index 5c41c0b0829..4eb70edb8e2 100644 --- a/src/Core/Settings.h +++ b/src/Core/Settings.h @@ -619,7 +619,7 @@ class IColumn; M(Bool, use_query_cache, false, "Enable the query cache", 0) \ M(Bool, enable_writes_to_query_cache, true, "Enable storing results of SELECT queries in the query cache", 0) \ M(Bool, enable_reads_from_query_cache, true, "Enable reading results of SELECT queries from the query cache", 0) \ - M(Bool, query_cache_store_results_of_queries_with_nondeterministic_functions, false, "Store results of queries with non-deterministic functions (e.g. 
rand(), now()) in the query cache", 0) \ + M(QueryCacheNondeterministicFunctionHandling, query_cache_nondeterministic_function_handling, QueryCacheNondeterministicFunctionHandling::Throw, "How the query cache handles queries with non-deterministic functions, e.g. now()", 0) \ M(UInt64, query_cache_max_size_in_bytes, 0, "The maximum amount of memory (in bytes) the current user may allocate in the query cache. 0 means unlimited. ", 0) \ M(UInt64, query_cache_max_entries, 0, "The maximum number of query results the current user may store in the query cache. 0 means unlimited.", 0) \ M(UInt64, query_cache_min_query_runs, 0, "Minimum number a SELECT query must run before its result is stored in the query cache", 0) \ @@ -877,6 +877,7 @@ class IColumn; MAKE_OBSOLETE(M, Bool, optimize_duplicate_order_by_and_distinct, false) \ MAKE_OBSOLETE(M, UInt64, parallel_replicas_min_number_of_granules_to_enable, 0) \ MAKE_OBSOLETE(M, Bool, query_plan_optimize_projection, true) \ + MAKE_OBSOLETE(M, Bool, query_cache_store_results_of_queries_with_nondeterministic_functions, false) \ /** The section above is for obsolete settings. Do not add anything there. 
*/ diff --git a/src/Core/SettingsEnums.cpp b/src/Core/SettingsEnums.cpp index 836993b724a..bec9e3a6afe 100644 --- a/src/Core/SettingsEnums.cpp +++ b/src/Core/SettingsEnums.cpp @@ -69,6 +69,12 @@ IMPLEMENT_SETTING_ENUM(DistributedProductMode, ErrorCodes::UNKNOWN_DISTRIBUTED_P {"allow", DistributedProductMode::ALLOW}}) +IMPLEMENT_SETTING_ENUM(QueryCacheNondeterministicFunctionHandling, ErrorCodes::BAD_ARGUMENTS, + {{"throw", QueryCacheNondeterministicFunctionHandling::Throw}, + {"save", QueryCacheNondeterministicFunctionHandling::Save}, + {"ignore", QueryCacheNondeterministicFunctionHandling::Ignore}}) + + IMPLEMENT_SETTING_ENUM(DateTimeInputFormat, ErrorCodes::BAD_ARGUMENTS, {{"basic", FormatSettings::DateTimeInputFormat::Basic}, {"best_effort", FormatSettings::DateTimeInputFormat::BestEffort}, diff --git a/src/Core/SettingsEnums.h b/src/Core/SettingsEnums.h index 29776f66608..0d6e87f25c2 100644 --- a/src/Core/SettingsEnums.h +++ b/src/Core/SettingsEnums.h @@ -70,6 +70,16 @@ enum class DistributedProductMode DECLARE_SETTING_ENUM(DistributedProductMode) +/// How the query cache handles queries with non-deterministic functions, e.g. now() +enum class QueryCacheNondeterministicFunctionHandling +{ + Throw, + Save, + Ignore +}; + +DECLARE_SETTING_ENUM(QueryCacheNondeterministicFunctionHandling) + DECLARE_SETTING_ENUM_WITH_RENAME(DateTimeInputFormat, FormatSettings::DateTimeInputFormat) diff --git a/src/Interpreters/executeQuery.cpp b/src/Interpreters/executeQuery.cpp index 8cd3c8ab848..be59ad22925 100644 --- a/src/Interpreters/executeQuery.cpp +++ b/src/Interpreters/executeQuery.cpp @@ -1106,32 +1106,37 @@ static std::tuple executeQueryImpl( /// top of the pipeline which stores the result in the query cache. 
if (can_use_query_cache && settings.enable_writes_to_query_cache) { - if (astContainsNonDeterministicFunctions(ast, context) && !settings.query_cache_store_results_of_queries_with_nondeterministic_functions) + const bool ast_contains_nondeterministic_functions = astContainsNonDeterministicFunctions(ast, context); + const QueryCacheNondeterministicFunctionHandling nondeterministic_function_handling = settings.query_cache_nondeterministic_function_handling; + if (ast_contains_nondeterministic_functions && nondeterministic_function_handling == QueryCacheNondeterministicFunctionHandling::Throw) throw Exception(ErrorCodes::CANNOT_USE_QUERY_CACHE_WITH_NONDETERMINISTIC_FUNCTIONS, - "Unable to cache the query result because the query contains a non-deterministic function. Use setting `query_cache_store_results_of_queries_with_nondeterministic_functions = 1` to cache the query result regardless"); + "Unable to cache the query result because the query contains a non-deterministic function. Use setting `query_cache_nondeterministic_function_handling = 'save'` or `= 'ignore'`to cache the query result regardless or omit caching."); - QueryCache::Key key( - ast, res.pipeline.getHeader(), - context->getUserName(), settings.query_cache_share_between_users, - std::chrono::system_clock::now() + std::chrono::seconds(settings.query_cache_ttl), - settings.query_cache_compress_entries); + if (!ast_contains_nondeterministic_functions || (ast_contains_nondeterministic_functions && nondeterministic_function_handling == QueryCacheNondeterministicFunctionHandling::Save)) + { + QueryCache::Key key( + ast, res.pipeline.getHeader(), + context->getUserName(), settings.query_cache_share_between_users, + std::chrono::system_clock::now() + std::chrono::seconds(settings.query_cache_ttl), + settings.query_cache_compress_entries); - const size_t num_query_runs = query_cache->recordQueryRun(key); - if (num_query_runs <= settings.query_cache_min_query_runs) - { - 
LOG_TRACE(&Poco::Logger::get("QueryCache"), "Skipped insert because the query ran {} times but the minimum required number of query runs to cache the query result is {}", num_query_runs, settings.query_cache_min_query_runs); - } - else - { - auto query_cache_writer = std::make_shared(query_cache->createWriter( - key, - std::chrono::milliseconds(settings.query_cache_min_query_duration.totalMilliseconds()), - settings.query_cache_squash_partial_results, - settings.max_block_size, - settings.query_cache_max_size_in_bytes, - settings.query_cache_max_entries)); - res.pipeline.writeResultIntoQueryCache(query_cache_writer); - query_cache_usage = QueryCache::Usage::Write; + const size_t num_query_runs = query_cache->recordQueryRun(key); + if (num_query_runs <= settings.query_cache_min_query_runs) + { + LOG_TRACE(&Poco::Logger::get("QueryCache"), "Skipped insert because the query ran {} times but the minimum required number of query runs to cache the query result is {}", num_query_runs, settings.query_cache_min_query_runs); + } + else + { + auto query_cache_writer = std::make_shared(query_cache->createWriter( + key, + std::chrono::milliseconds(settings.query_cache_min_query_duration.totalMilliseconds()), + settings.query_cache_squash_partial_results, + settings.max_block_size, + settings.query_cache_max_size_in_bytes, + settings.query_cache_max_entries)); + res.pipeline.writeResultIntoQueryCache(query_cache_writer); + query_cache_usage = QueryCache::Usage::Write; + } } } diff --git a/tests/queries/0_stateless/02494_query_cache_nondeterministic_functions.reference b/tests/queries/0_stateless/02494_query_cache_nondeterministic_functions.reference index e666f54d4c4..732f96219bd 100644 --- a/tests/queries/0_stateless/02494_query_cache_nondeterministic_functions.reference +++ b/tests/queries/0_stateless/02494_query_cache_nondeterministic_functions.reference @@ -2,3 +2,7 @@ --- 1 1 +--- +1 +0 +--- diff --git 
a/tests/queries/0_stateless/02494_query_cache_nondeterministic_functions.sql b/tests/queries/0_stateless/02494_query_cache_nondeterministic_functions.sql index 62e0b099d7a..f6f7eff1cc5 100644 --- a/tests/queries/0_stateless/02494_query_cache_nondeterministic_functions.sql +++ b/tests/queries/0_stateless/02494_query_cache_nondeterministic_functions.sql @@ -1,16 +1,30 @@ --- Tags: no-parallel -- Tag no-parallel: Messes with internal cache +-- Tags: no-parallel SYSTEM DROP QUERY CACHE; -- rand() is non-deterministic, the query is rejected by default -SELECT COUNT(rand(1)) SETTINGS use_query_cache = true; -- { serverError CANNOT_USE_QUERY_CACHE_WITH_NONDETERMINISTIC_FUNCTIONS } -SELECT COUNT(*) FROM system.query_cache; +-- to throw is the default behavior +SELECT count(rand(1)) SETTINGS use_query_cache = true; -- { serverError CANNOT_USE_QUERY_CACHE_WITH_NONDETERMINISTIC_FUNCTIONS } +SELECT count(rand(1)) SETTINGS use_query_cache = true, query_cache_nondeterministic_function_handling = 'throw'; -- { serverError CANNOT_USE_QUERY_CACHE_WITH_NONDETERMINISTIC_FUNCTIONS } +SELECT count(*) FROM system.query_cache; + +SYSTEM DROP QUERY CACHE; SELECT '---'; --- Force caching using a setting -SELECT COUNT(RAND(1)) SETTINGS use_query_cache = true, query_cache_store_results_of_queries_with_nondeterministic_functions = true; -SELECT COUNT(*) FROM system.query_cache; +-- 'save' forces caching +SELECT count(rand(1)) SETTINGS use_query_cache = true, query_cache_nondeterministic_function_handling = 'save'; +SELECT count(*) FROM system.query_cache; SYSTEM DROP QUERY CACHE; + +SELECT '---'; + +-- 'ignore' suppresses the exception but doesn't cache +SELECT count(rand(1)) SETTINGS use_query_cache = true, query_cache_nondeterministic_function_handling = 'ignore'; +SELECT count(*) FROM system.query_cache; + +SYSTEM DROP QUERY CACHE; + +SELECT '---'; diff --git a/tests/queries/0_stateless/02888_obsolete_settings.reference b/tests/queries/0_stateless/02888_obsolete_settings.reference index 
6ee5216cd73..63553092c0c 100644 --- a/tests/queries/0_stateless/02888_obsolete_settings.reference +++ b/tests/queries/0_stateless/02888_obsolete_settings.reference @@ -42,6 +42,7 @@ optimize_duplicate_order_by_and_distinct optimize_fuse_sum_count_avg parallel_replicas_min_number_of_granules_to_enable partial_merge_join_optimizations +query_cache_store_results_of_queries_with_nondeterministic_functions query_plan_optimize_projection replication_alter_columns_timeout restore_threads From 9b61de2b8be913bc445f5725b70a75b6730e080d Mon Sep 17 00:00:00 2001 From: Robert Schulze Date: Thu, 9 Nov 2023 14:28:42 +0000 Subject: [PATCH 46/99] Fix whitespace --- src/Core/SettingsEnums.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/Core/SettingsEnums.cpp b/src/Core/SettingsEnums.cpp index bec9e3a6afe..b853b0d0a0b 100644 --- a/src/Core/SettingsEnums.cpp +++ b/src/Core/SettingsEnums.cpp @@ -70,8 +70,8 @@ IMPLEMENT_SETTING_ENUM(DistributedProductMode, ErrorCodes::UNKNOWN_DISTRIBUTED_P IMPLEMENT_SETTING_ENUM(QueryCacheNondeterministicFunctionHandling, ErrorCodes::BAD_ARGUMENTS, - {{"throw", QueryCacheNondeterministicFunctionHandling::Throw}, - {"save", QueryCacheNondeterministicFunctionHandling::Save}, + {{"throw", QueryCacheNondeterministicFunctionHandling::Throw}, + {"save", QueryCacheNondeterministicFunctionHandling::Save}, {"ignore", QueryCacheNondeterministicFunctionHandling::Ignore}}) From 91f53514d9da76aebfaf5313b1f4ca502370d62c Mon Sep 17 00:00:00 2001 From: Robert Schulze Date: Thu, 9 Nov 2023 19:54:09 +0000 Subject: [PATCH 47/99] Incorporate review feedback --- docs/en/operations/query-cache.md | 5 +++++ docs/en/operations/settings/settings.md | 2 +- src/Interpreters/executeQuery.cpp | 10 +++++++--- 3 files changed, 13 insertions(+), 4 deletions(-) diff --git a/docs/en/operations/query-cache.md b/docs/en/operations/query-cache.md index e1f43484082..def0f48b968 100644 --- a/docs/en/operations/query-cache.md +++ 
b/docs/en/operations/query-cache.md @@ -171,6 +171,11 @@ Also, results of queries with non-deterministic functions are not cached by defa To force caching of results of queries with non-deterministic functions regardless, use setting [query_cache_nondeterministic_function_handling](settings/settings.md#query-cache-nondeterministic-function-handling). +:::note +Prior to ClickHouse v23.11, setting 'query_cache_store_results_of_queries_with_nondeterministic_functions = 0 / 1' controlled whether +results of queries with non-deterministic results were cached. In newer ClickHouse versions, this setting is obsolete and has no effect. +::: + Finally, entries in the query cache are not shared between users due to security reasons. For example, user A must not be able to bypass a row policy on a table by running the same query as another user B for whom no such policy exists. However, if necessary, cache entries can be marked accessible by other users (i.e. shared) by supplying setting diff --git a/docs/en/operations/settings/settings.md b/docs/en/operations/settings/settings.md index 4cae272880a..67b62501dd9 100644 --- a/docs/en/operations/settings/settings.md +++ b/docs/en/operations/settings/settings.md @@ -1665,7 +1665,7 @@ Possible values: - `'throw'` - Throw an exception. - `'save'` - Cache the query result even if it is non-deterministic. -- `'ignore'` - Don't cache the query result (but also don't throw an exception). +- `'ignore'` - Don't cache the query result and don't throw an exception. Default value: `throw`. 
diff --git a/src/Interpreters/executeQuery.cpp b/src/Interpreters/executeQuery.cpp index be59ad22925..bd64822fa40 100644 --- a/src/Interpreters/executeQuery.cpp +++ b/src/Interpreters/executeQuery.cpp @@ -1108,11 +1108,13 @@ static std::tuple executeQueryImpl( { const bool ast_contains_nondeterministic_functions = astContainsNonDeterministicFunctions(ast, context); const QueryCacheNondeterministicFunctionHandling nondeterministic_function_handling = settings.query_cache_nondeterministic_function_handling; + if (ast_contains_nondeterministic_functions && nondeterministic_function_handling == QueryCacheNondeterministicFunctionHandling::Throw) throw Exception(ErrorCodes::CANNOT_USE_QUERY_CACHE_WITH_NONDETERMINISTIC_FUNCTIONS, - "Unable to cache the query result because the query contains a non-deterministic function. Use setting `query_cache_nondeterministic_function_handling = 'save'` or `= 'ignore'`to cache the query result regardless or omit caching."); + "The query result was not cached because the query contains a non-deterministic function." 
+ " Use setting `query_cache_nondeterministic_function_handling = 'save'` or `= 'ignore'` to cache the query result regardless or to omit caching"); - if (!ast_contains_nondeterministic_functions || (ast_contains_nondeterministic_functions && nondeterministic_function_handling == QueryCacheNondeterministicFunctionHandling::Save)) + if (!ast_contains_nondeterministic_functions || nondeterministic_function_handling == QueryCacheNondeterministicFunctionHandling::Save) { QueryCache::Key key( ast, res.pipeline.getHeader(), @@ -1123,7 +1125,9 @@ static std::tuple executeQueryImpl( const size_t num_query_runs = query_cache->recordQueryRun(key); if (num_query_runs <= settings.query_cache_min_query_runs) { - LOG_TRACE(&Poco::Logger::get("QueryCache"), "Skipped insert because the query ran {} times but the minimum required number of query runs to cache the query result is {}", num_query_runs, settings.query_cache_min_query_runs); + LOG_TRACE(&Poco::Logger::get("QueryCache"), + "Skipped insert because the query ran {} times but the minimum required number of query runs to cache the query result is {}", + num_query_runs, settings.query_cache_min_query_runs); } else { From bea529c9a19983fa9fedc9c4a0f9037923dc2d50 Mon Sep 17 00:00:00 2001 From: Robert Schulze Date: Fri, 10 Nov 2023 11:39:56 +0000 Subject: [PATCH 48/99] Try to stabilize test results --- .../functions/date-time-functions.md | 2 +- src/Common/ErrorCodes.cpp | 2 +- src/Interpreters/executeQuery.cpp | 4 ++-- ...cache_nondeterministic_functions.reference | 6 +++--- ...query_cache_nondeterministic_functions.sql | 21 +++++++------------ 5 files changed, 14 insertions(+), 21 deletions(-) diff --git a/docs/en/sql-reference/functions/date-time-functions.md b/docs/en/sql-reference/functions/date-time-functions.md index 55d09be7847..43f7c9cc61e 100644 --- a/docs/en/sql-reference/functions/date-time-functions.md +++ b/docs/en/sql-reference/functions/date-time-functions.md @@ -1381,7 +1381,7 @@ 
toStartOfFifteenMinutes(toDateTime('2023-04-21 10:20:00')): 2023-04-21 10:15:00 toStartOfFifteenMinutes(toDateTime('2023-04-21 10:23:00')): 2023-04-21 10:15:00 ``` -## toStartOfInterval(time_or_data, INTERVAL x unit \[, time_zone\]) +## toStartOfInterval(date_or_date_with_time, INTERVAL x unit \[, time_zone\]) This function generalizes other `toStartOf*()` functions. For example, - `toStartOfInterval(t, INTERVAL 1 year)` returns the same as `toStartOfYear(t)`, diff --git a/src/Common/ErrorCodes.cpp b/src/Common/ErrorCodes.cpp index 28f8e6c6021..95ca49d2713 100644 --- a/src/Common/ErrorCodes.cpp +++ b/src/Common/ErrorCodes.cpp @@ -583,7 +583,7 @@ M(701, CLUSTER_DOESNT_EXIST) \ M(702, CLIENT_INFO_DOES_NOT_MATCH) \ M(703, INVALID_IDENTIFIER) \ - M(704, CANNOT_USE_QUERY_CACHE_WITH_NONDETERMINISTIC_FUNCTIONS) \ + M(704, QUERY_CACHE_USED_WITH_NONDETERMINISTIC_FUNCTIONS) \ M(705, TABLE_NOT_EMPTY) \ M(706, LIBSSH_ERROR) \ M(999, KEEPER_EXCEPTION) \ diff --git a/src/Interpreters/executeQuery.cpp b/src/Interpreters/executeQuery.cpp index bd64822fa40..bfa54e7db35 100644 --- a/src/Interpreters/executeQuery.cpp +++ b/src/Interpreters/executeQuery.cpp @@ -96,7 +96,7 @@ namespace DB namespace ErrorCodes { - extern const int CANNOT_USE_QUERY_CACHE_WITH_NONDETERMINISTIC_FUNCTIONS; + extern const int QUERY_CACHE_USED_WITH_NONDETERMINISTIC_FUNCTIONS; extern const int INTO_OUTFILE_NOT_ALLOWED; extern const int INVALID_TRANSACTION; extern const int LOGICAL_ERROR; @@ -1110,7 +1110,7 @@ static std::tuple executeQueryImpl( const QueryCacheNondeterministicFunctionHandling nondeterministic_function_handling = settings.query_cache_nondeterministic_function_handling; if (ast_contains_nondeterministic_functions && nondeterministic_function_handling == QueryCacheNondeterministicFunctionHandling::Throw) - throw Exception(ErrorCodes::CANNOT_USE_QUERY_CACHE_WITH_NONDETERMINISTIC_FUNCTIONS, + throw Exception(ErrorCodes::QUERY_CACHE_USED_WITH_NONDETERMINISTIC_FUNCTIONS, "The query result was not 
cached because the query contains a non-deterministic function." " Use setting `query_cache_nondeterministic_function_handling = 'save'` or `= 'ignore'` to cache the query result regardless or to omit caching"); diff --git a/tests/queries/0_stateless/02494_query_cache_nondeterministic_functions.reference b/tests/queries/0_stateless/02494_query_cache_nondeterministic_functions.reference index 732f96219bd..74dcf748395 100644 --- a/tests/queries/0_stateless/02494_query_cache_nondeterministic_functions.reference +++ b/tests/queries/0_stateless/02494_query_cache_nondeterministic_functions.reference @@ -1,8 +1,8 @@ +-- query_cache_nondeterministic_function_handling = throw 0 ---- +-- query_cache_nondeterministic_function_handling = save 1 1 ---- +-- query_cache_nondeterministic_function_handling = ignore 1 0 ---- diff --git a/tests/queries/0_stateless/02494_query_cache_nondeterministic_functions.sql b/tests/queries/0_stateless/02494_query_cache_nondeterministic_functions.sql index f6f7eff1cc5..1192a19e26b 100644 --- a/tests/queries/0_stateless/02494_query_cache_nondeterministic_functions.sql +++ b/tests/queries/0_stateless/02494_query_cache_nondeterministic_functions.sql @@ -3,28 +3,21 @@ SYSTEM DROP QUERY CACHE; --- rand() is non-deterministic, the query is rejected by default --- to throw is the default behavior -SELECT count(rand(1)) SETTINGS use_query_cache = true; -- { serverError CANNOT_USE_QUERY_CACHE_WITH_NONDETERMINISTIC_FUNCTIONS } -SELECT count(rand(1)) SETTINGS use_query_cache = true, query_cache_nondeterministic_function_handling = 'throw'; -- { serverError CANNOT_USE_QUERY_CACHE_WITH_NONDETERMINISTIC_FUNCTIONS } +SELECT '-- query_cache_nondeterministic_function_handling = throw'; +SELECT count(now()) SETTINGS use_query_cache = true; -- { serverError QUERY_CACHE_USED_WITH_NONDETERMINISTIC_FUNCTIONS } +SELECT count(now()) SETTINGS use_query_cache = true, query_cache_nondeterministic_function_handling = 'throw'; -- { serverError 
QUERY_CACHE_USED_WITH_NONDETERMINISTIC_FUNCTIONS } SELECT count(*) FROM system.query_cache; SYSTEM DROP QUERY CACHE; -SELECT '---'; - --- 'save' forces caching -SELECT count(rand(1)) SETTINGS use_query_cache = true, query_cache_nondeterministic_function_handling = 'save'; +SELECT '-- query_cache_nondeterministic_function_handling = save'; +SELECT count(now()) SETTINGS use_query_cache = true, query_cache_nondeterministic_function_handling = 'save'; SELECT count(*) FROM system.query_cache; SYSTEM DROP QUERY CACHE; -SELECT '---'; - --- 'ignore' suppresses the exception but doesn't cache -SELECT count(rand(1)) SETTINGS use_query_cache = true, query_cache_nondeterministic_function_handling = 'ignore'; +SELECT '-- query_cache_nondeterministic_function_handling = ignore'; +SELECT count(now()) SETTINGS use_query_cache = true, query_cache_nondeterministic_function_handling = 'ignore'; SELECT count(*) FROM system.query_cache; SYSTEM DROP QUERY CACHE; - -SELECT '---'; From 9194f77e71ba69bc290d907015b11709d6f8d8e8 Mon Sep 17 00:00:00 2001 From: jsc0218 Date: Mon, 13 Nov 2023 02:11:31 +0000 Subject: [PATCH 49/99] change default value of PG's conn timeout and try times to avoid endless wait when conn url is unavailable --- src/Core/PostgreSQL/PoolWithFailover.h | 2 +- src/Core/PostgreSQL/Utils.cpp | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/Core/PostgreSQL/PoolWithFailover.h b/src/Core/PostgreSQL/PoolWithFailover.h index bf9c34e6723..bf3782afba4 100644 --- a/src/Core/PostgreSQL/PoolWithFailover.h +++ b/src/Core/PostgreSQL/PoolWithFailover.h @@ -14,7 +14,7 @@ static constexpr inline auto POSTGRESQL_POOL_DEFAULT_SIZE = 16; static constexpr inline auto POSTGRESQL_POOL_WAIT_TIMEOUT = 5000; -static constexpr inline auto POSTGRESQL_POOL_WITH_FAILOVER_DEFAULT_MAX_TRIES = 5; +static constexpr inline auto POSTGRESQL_POOL_WITH_FAILOVER_DEFAULT_MAX_TRIES = 2; namespace postgres { diff --git a/src/Core/PostgreSQL/Utils.cpp b/src/Core/PostgreSQL/Utils.cpp index 
b4ad19c819a..810bf62fdab 100644 --- a/src/Core/PostgreSQL/Utils.cpp +++ b/src/Core/PostgreSQL/Utils.cpp @@ -16,7 +16,7 @@ ConnectionInfo formatConnectionString(String dbname, String host, UInt16 port, S << " port=" << port << " user=" << DB::quote << user << " password=" << DB::quote << password - << " connect_timeout=10"; + << " connect_timeout=2"; return {out.str(), host + ':' + DB::toString(port)}; } From 9a7f5ab0a1e265598b7db147d3be95c398ecbb99 Mon Sep 17 00:00:00 2001 From: Alexey Milovidov Date: Mon, 13 Nov 2023 08:54:24 +0100 Subject: [PATCH 50/99] Better use of build cache --- src/CMakeLists.txt | 1 - src/Client/Connection.cpp | 2 +- src/Common/config_version.cpp.in | 14 +++++++++++ src/Common/config_version.h | 23 ++++++++++++++++++ src/Common/config_version.h.in | 24 ------------------- src/Daemon/BaseDaemon.cpp | 6 +---- src/Interpreters/ClientInfo.cpp | 2 +- src/Processors/Formats/Impl/Parquet/Write.cpp | 2 +- 8 files changed, 41 insertions(+), 33 deletions(-) create mode 100644 src/Common/config_version.h delete mode 100644 src/Common/config_version.h.in diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index 0257b7d329b..bac06a2f108 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -18,7 +18,6 @@ include (../cmake/version.cmake) message (STATUS "Will build ${VERSION_FULL} revision ${VERSION_REVISION} ${VERSION_OFFICIAL}") include (configure_config.cmake) configure_file (Common/config.h.in ${CONFIG_INCLUDE_PATH}/config.h) -configure_file (Common/config_version.h.in ${CONFIG_INCLUDE_PATH}/config_version.h) configure_file (Common/config_version.cpp.in ${CONFIG_INCLUDE_PATH}/config_version.cpp) if (USE_DEBUG_HELPERS) diff --git a/src/Client/Connection.cpp b/src/Client/Connection.cpp index cf25ee6e856..7af9c99f50c 100644 --- a/src/Client/Connection.cpp +++ b/src/Client/Connection.cpp @@ -296,7 +296,7 @@ void Connection::sendHello() "Parameters 'default_database', 'user' and 'password' must not contain ASCII control characters"); 
writeVarUInt(Protocol::Client::Hello, *out); - writeStringBinary((VERSION_NAME " ") + client_name, *out); + writeStringBinary(std::string(VERSION_NAME) + " " + client_name, *out); writeVarUInt(VERSION_MAJOR, *out); writeVarUInt(VERSION_MINOR, *out); // NOTE For backward compatibility of the protocol, client cannot send its version_patch. diff --git a/src/Common/config_version.cpp.in b/src/Common/config_version.cpp.in index f31e82bf582..26ae0f0f4d7 100644 --- a/src/Common/config_version.cpp.in +++ b/src/Common/config_version.cpp.in @@ -1,3 +1,17 @@ /// This file was autogenerated by CMake +#include "config_version.h" + +const unsigned VERSION_REVISION = @VERSION_REVISION@; +const char * VERSION_NAME = "@VERSION_NAME@"; +const unsigned VERSION_MAJOR = @VERSION_MAJOR@; +const unsigned VERSION_MINOR = @VERSION_MINOR@; +const unsigned VERSION_PATCH = @VERSION_PATCH@; +const char * VERSION_STRING = "@VERSION_STRING@"; +const char * VERSION_STRING_SHORT = "@VERSION_STRING_SHORT@"; +const char * VERSION_OFFICIAL = "@VERSION_OFFICIAL@"; +const char * VERSION_FULL = "@VERSION_FULL@"; +const char * VERSION_DESCRIBE = "@VERSION_DESCRIBE@"; +const unsigned VERSION_INTEGER = @VERSION_INTEGER@; + const char * VERSION_GITHASH = "@VERSION_GITHASH@"; diff --git a/src/Common/config_version.h b/src/Common/config_version.h new file mode 100644 index 00000000000..b1e0ea67a68 --- /dev/null +++ b/src/Common/config_version.h @@ -0,0 +1,23 @@ +/// This file was autogenerated by CMake + +#pragma once + +/// These fields are changing only on every release, but we still don't want to have them in the header file, +/// because it will make build cache less efficient. + +// NOTE: has nothing common with DBMS_TCP_PROTOCOL_VERSION, +// only DBMS_TCP_PROTOCOL_VERSION should be incremented on protocol changes. 
+extern const unsigned VERSION_REVISION; +extern const char * VERSION_NAME; +extern const unsigned VERSION_MAJOR; +extern const unsigned VERSION_MINOR; +extern const unsigned VERSION_PATCH; +extern const char * VERSION_STRING; +extern const char * VERSION_STRING_SHORT; +extern const char * VERSION_OFFICIAL; +extern const char * VERSION_FULL; +extern const char * VERSION_DESCRIBE; +extern const unsigned VERSION_INTEGER; + +/// These fields are frequently changing and we don't want to have them in the header file to allow caching. +extern const char * VERSION_GITHASH; diff --git a/src/Common/config_version.h.in b/src/Common/config_version.h.in deleted file mode 100644 index aa8ddeeb860..00000000000 --- a/src/Common/config_version.h.in +++ /dev/null @@ -1,24 +0,0 @@ -/// This file was autogenerated by CMake - -#pragma once - -// NOTE: has nothing common with DBMS_TCP_PROTOCOL_VERSION, -// only DBMS_TCP_PROTOCOL_VERSION should be incremented on protocol changes. -#cmakedefine VERSION_REVISION @VERSION_REVISION@ -#cmakedefine VERSION_NAME "@VERSION_NAME@" -#cmakedefine VERSION_MAJOR @VERSION_MAJOR@ -#cmakedefine VERSION_MINOR @VERSION_MINOR@ -#cmakedefine VERSION_PATCH @VERSION_PATCH@ -#cmakedefine VERSION_STRING "@VERSION_STRING@" -#cmakedefine VERSION_STRING_SHORT "@VERSION_STRING_SHORT@" -#cmakedefine VERSION_OFFICIAL "@VERSION_OFFICIAL@" -#cmakedefine VERSION_FULL "@VERSION_FULL@" -#cmakedefine VERSION_DESCRIBE "@VERSION_DESCRIBE@" -#cmakedefine VERSION_INTEGER @VERSION_INTEGER@ - -/// These fields are frequently changing and we don't want to have them in the header file to allow caching. 
-extern const char * VERSION_GITHASH; - -#if !defined(VERSION_OFFICIAL) -# define VERSION_OFFICIAL "" -#endif diff --git a/src/Daemon/BaseDaemon.cpp b/src/Daemon/BaseDaemon.cpp index f9e402e51ee..dba4b13835d 100644 --- a/src/Daemon/BaseDaemon.cpp +++ b/src/Daemon/BaseDaemon.cpp @@ -485,10 +485,8 @@ private: { SentryWriter::onFault(sig, error_message, stack_trace); -#pragma clang diagnostic push -#pragma clang diagnostic ignored "-Wunreachable-code" /// Advice the user to send it manually. - if constexpr (std::string_view(VERSION_OFFICIAL).contains("official build")) + if (std::string_view(VERSION_OFFICIAL).contains("official build")) { const auto & date_lut = DateLUT::instance(); @@ -506,8 +504,6 @@ private: { LOG_FATAL(log, "This ClickHouse version is not official and should be upgraded to the official build."); } -#pragma clang diagnostic pop - } /// ClickHouse Keeper does not link to some part of Settings. diff --git a/src/Interpreters/ClientInfo.cpp b/src/Interpreters/ClientInfo.cpp index 92ef5a0d159..b478daabc08 100644 --- a/src/Interpreters/ClientInfo.cpp +++ b/src/Interpreters/ClientInfo.cpp @@ -196,7 +196,7 @@ void ClientInfo::setInitialQuery() if (client_name.empty()) client_name = VERSION_NAME; else - client_name = (VERSION_NAME " ") + client_name; + client_name = std::string(VERSION_NAME) + " " + client_name; } bool ClientInfo::clientVersionEquals(const ClientInfo & other, bool compare_patch) const diff --git a/src/Processors/Formats/Impl/Parquet/Write.cpp b/src/Processors/Formats/Impl/Parquet/Write.cpp index 340ed5f510c..92d9df9b391 100644 --- a/src/Processors/Formats/Impl/Parquet/Write.cpp +++ b/src/Processors/Formats/Impl/Parquet/Write.cpp @@ -916,7 +916,7 @@ void writeFileFooter(std::vector row_groups, SchemaElements sche meta.row_groups = std::move(row_groups); for (auto & r : meta.row_groups) meta.num_rows += r.num_rows; - meta.__set_created_by(VERSION_NAME " " VERSION_DESCRIBE); + meta.__set_created_by(std::string(VERSION_NAME) + " " + 
VERSION_DESCRIBE); if (options.write_page_statistics || options.write_column_chunk_statistics) { From a05091be04b179b3a046caee8a66260db3ffb901 Mon Sep 17 00:00:00 2001 From: Alexey Milovidov Date: Mon, 13 Nov 2023 09:06:18 +0100 Subject: [PATCH 51/99] Miscellaneous --- base/base/wide_integer_impl.h | 10 +++++----- src/Common/Exception.h | 10 +++++----- src/Common/LoggingFormatStringHelpers.h | 8 ++++---- src/Common/formatIPv6.h | 2 +- 4 files changed, 15 insertions(+), 15 deletions(-) diff --git a/base/base/wide_integer_impl.h b/base/base/wide_integer_impl.h index fc4e9e551ca..8d1da7de642 100644 --- a/base/base/wide_integer_impl.h +++ b/base/base/wide_integer_impl.h @@ -65,7 +65,7 @@ class IsTupleLike static void check(...); public: - static constexpr const bool value = !std::is_void(nullptr))>::value; + static constexpr const bool value = !std::is_void_v(nullptr))>; }; } @@ -79,7 +79,7 @@ class numeric_limits> { public: static constexpr bool is_specialized = true; - static constexpr bool is_signed = is_same::value; + static constexpr bool is_signed = is_same_v; static constexpr bool is_integer = true; static constexpr bool is_exact = true; static constexpr bool has_infinity = false; @@ -91,7 +91,7 @@ public: static constexpr bool is_iec559 = false; static constexpr bool is_bounded = true; static constexpr bool is_modulo = true; - static constexpr int digits = Bits - (is_same::value ? 1 : 0); + static constexpr int digits = Bits - (is_same_v ? 1 : 0); static constexpr int digits10 = digits * 0.30103 /*std::log10(2)*/; static constexpr int max_digits10 = 0; static constexpr int radix = 2; @@ -104,7 +104,7 @@ public: static constexpr wide::integer min() noexcept { - if (is_same::value) + if (is_same_v) { using T = wide::integer; T res{}; @@ -118,7 +118,7 @@ public: { using T = wide::integer; T res{}; - res.items[T::_impl::big(0)] = is_same::value + res.items[T::_impl::big(0)] = is_same_v ? 
std::numeric_limits::signed_base_type>::max() : std::numeric_limits::base_type>::max(); for (unsigned i = 1; i < wide::integer::_impl::item_count; ++i) diff --git a/src/Common/Exception.h b/src/Common/Exception.h index a7ffa8adcd0..ac116f5ceca 100644 --- a/src/Common/Exception.h +++ b/src/Common/Exception.h @@ -73,8 +73,8 @@ protected: struct MessageMasked { std::string msg; - MessageMasked(const std::string & msg_); - MessageMasked(std::string && msg_); + explicit MessageMasked(const std::string & msg_); + explicit MessageMasked(std::string && msg_); }; Exception(const MessageMasked & msg_masked, int code, bool remote_); @@ -123,7 +123,7 @@ public: Exception(CreateFromSTDTag, const std::exception & exc); Exception * clone() const override { return new Exception(*this); } - void rethrow() const override { throw *this; } + void rethrow() const override { throw *this; } // NOLINT const char * name() const noexcept override { return "DB::Exception"; } const char * what() const noexcept override { return message().data(); } @@ -181,7 +181,7 @@ public: : Exception(msg, code), saved_errno(saved_errno_), path(path_) {} ErrnoException * clone() const override { return new ErrnoException(*this); } - void rethrow() const override { throw *this; } + void rethrow() const override { throw *this; } // NOLINT int getErrno() const { return saved_errno; } std::optional getPath() const { return path; } @@ -219,7 +219,7 @@ public: void setFileName(const String & file_name_) { file_name = file_name_; } Exception * clone() const override { return new ParsingException(*this); } - void rethrow() const override { throw *this; } + void rethrow() const override { throw *this; } // NOLINT private: ssize_t line_number{-1}; diff --git a/src/Common/LoggingFormatStringHelpers.h b/src/Common/LoggingFormatStringHelpers.h index 06320509c17..ef7ec0c6144 100644 --- a/src/Common/LoggingFormatStringHelpers.h +++ b/src/Common/LoggingFormatStringHelpers.h @@ -106,8 +106,8 @@ template constexpr 
std::string_view tryGetStaticFormatString(T && x /// Most likely it was a string literal. /// Unfortunately, there's no good way to check if something is a string literal. /// But fmtlib requires a format string to be compile-time constant unless fmt::runtime is used. - static_assert(std::is_nothrow_convertible::value); - static_assert(!std::is_pointer::value); + static_assert(std::is_nothrow_convertible_v); + static_assert(!std::is_pointer_v); return std::string_view(x); } } @@ -127,8 +127,8 @@ template<> struct ConstexprIfsAreNotIfdefs { /// See tryGetStaticFormatString(...) static_assert(!std::is_same_v>); - static_assert(std::is_nothrow_convertible::value); - static_assert(!std::is_pointer::value); + static_assert(std::is_nothrow_convertible_v); + static_assert(!std::is_pointer_v); return std::string_view(x); } diff --git a/src/Common/formatIPv6.h b/src/Common/formatIPv6.h index be4dfc7391e..f2a9ee960f3 100644 --- a/src/Common/formatIPv6.h +++ b/src/Common/formatIPv6.h @@ -45,7 +45,7 @@ void formatIPv6(const unsigned char * src, char *& dst, uint8_t zeroed_tail_byte * @return - true if parsed successfully, false otherwise. 
*/ template -requires (std::is_same::type, char>::value) +requires (std::is_same_v, char>) inline bool parseIPv4(T * &src, EOFfunction eof, unsigned char * dst, int32_t first_octet = -1) { if (src == nullptr || first_octet > 255) From 6add71bd414340532a0769b2c16a1ac98aba3d5a Mon Sep 17 00:00:00 2001 From: Jordi Villar Date: Sun, 12 Nov 2023 17:49:19 +0100 Subject: [PATCH 52/99] Continue with work from #56621 --- .../02912_ingestion_mv_deduplication.reference | 2 +- .../02912_ingestion_mv_deduplication.sql | 17 ++++++++--------- 2 files changed, 9 insertions(+), 10 deletions(-) diff --git a/tests/queries/0_stateless/02912_ingestion_mv_deduplication.reference b/tests/queries/0_stateless/02912_ingestion_mv_deduplication.reference index 946897a4fe3..335b55f05c8 100644 --- a/tests/queries/0_stateless/02912_ingestion_mv_deduplication.reference +++ b/tests/queries/0_stateless/02912_ingestion_mv_deduplication.reference @@ -1,4 +1,4 @@ --- Original issue with max_insert_delayed_streams_for_parallel_write = 1 +-- Original issue with max_insert_delayed_streams_for_parallel_write <= 1 -- Landing 2022-09-01 12:23:34 42 2023-09-01 12:23:34 42 diff --git a/tests/queries/0_stateless/02912_ingestion_mv_deduplication.sql b/tests/queries/0_stateless/02912_ingestion_mv_deduplication.sql index 68901b67c91..f206f0d7775 100644 --- a/tests/queries/0_stateless/02912_ingestion_mv_deduplication.sql +++ b/tests/queries/0_stateless/02912_ingestion_mv_deduplication.sql @@ -1,7 +1,7 @@ --- Tags: replica +-- Tags: zookeeper SET session_timezone = 'UTC'; -SELECT '-- Original issue with max_insert_delayed_streams_for_parallel_write = 1'; +SELECT '-- Original issue with max_insert_delayed_streams_for_parallel_write <= 1'; /* This is the expected behavior when mv deduplication is set to false. 
@@ -11,7 +11,7 @@ SELECT '-- Original issue with max_insert_delayed_streams_for_parallel_write = 1 - 2nd insert gets both blocks inserted in mv table */ -SET deduplicate_blocks_in_dependent_materialized_views = 0, max_insert_delayed_streams_for_parallel_write = 1; +SET deduplicate_blocks_in_dependent_materialized_views = 0, max_insert_delayed_streams_for_parallel_write = 0; CREATE TABLE landing ( @@ -48,7 +48,7 @@ SELECT '-- Original issue with deduplicate_blocks_in_dependent_materialized_view This is the unexpected behavior due to setting max_insert_delayed_streams_for_parallel_write > 1. This unexpected behavior was present since version 21.9 or earlier but due to this PR https://github.com/ClickHouse/ClickHouse/pull/34780 - when max_insert_delayed_streams_for_parallel_write setting it to 1 by default the issue was mitigated. + when max_insert_delayed_streams_for_parallel_write gets disabled by default the issue was mitigated. This is what happens: @@ -57,7 +57,7 @@ SELECT '-- Original issue with deduplicate_blocks_in_dependent_materialized_view - 2nd insert is not inserting anything in mv table due to a bug computing blocks to be discarded */ -SET deduplicate_blocks_in_dependent_materialized_views = 0, max_insert_delayed_streams_for_parallel_write = 10; +SET deduplicate_blocks_in_dependent_materialized_views = 0, max_insert_delayed_streams_for_parallel_write = 1000; CREATE TABLE landing ( @@ -85,14 +85,13 @@ SELECT * FROM landing FINAL ORDER BY time; SELECT '-- MV'; SELECT * FROM mv FINAL ORDER BY hour; -SET max_insert_delayed_streams_for_parallel_write = 1; DROP TABLE IF EXISTS landing SYNC; DROP TABLE IF EXISTS mv SYNC; SELECT '-- Original issue with deduplicate_blocks_in_dependent_materialized_views = 1 AND max_insert_delayed_streams_for_parallel_write > 1'; /* - By setting deduplicate_blocks_in_dependent_materialized_views = 1 we can make the code go through a different path getting an expected + By setting deduplicate_blocks_in_dependent_materialized_views 
= 1 we can make the code go through a different path getting an expected behavior again, even with max_insert_delayed_streams_for_parallel_write > 1. This is what happens now: @@ -101,7 +100,7 @@ SELECT '-- Original issue with deduplicate_blocks_in_dependent_materialized_view - 2nd insert gets first block 20220901 deduplicated and second one inserted for landing and mv tables */ -SET deduplicate_blocks_in_dependent_materialized_views = 1, max_insert_delayed_streams_for_parallel_write = 10; +SET deduplicate_blocks_in_dependent_materialized_views = 1, max_insert_delayed_streams_for_parallel_write = 1000; CREATE TABLE landing ( @@ -129,7 +128,6 @@ SELECT * FROM landing FINAL ORDER BY time; SELECT '-- MV'; SELECT * FROM mv FINAL ORDER BY hour; -SET max_insert_delayed_streams_for_parallel_write = 1; DROP TABLE IF EXISTS landing SYNC; DROP TABLE IF EXISTS mv SYNC; @@ -142,6 +140,7 @@ SELECT '-- Regression introduced in https://github.com/ClickHouse/ClickHouse/pul max_insert_delayed_streams_for_parallel_write > 1 but it ended up adding a new regression. 
*/ +SET deduplicate_blocks_in_dependent_materialized_views = 0, max_insert_delayed_streams_for_parallel_write = 0; CREATE TABLE landing ( From 82c461031e70ce4309dec832cfd8dfee5fc3dd43 Mon Sep 17 00:00:00 2001 From: Alexey Milovidov Date: Mon, 13 Nov 2023 10:09:23 +0100 Subject: [PATCH 53/99] Fix build --- programs/client/Client.cpp | 5 +---- programs/keeper/Keeper.cpp | 2 +- programs/server/Server.cpp | 2 +- src/Client/ClientBase.cpp | 2 +- src/Client/Connection.cpp | 2 +- src/Common/ClickHouseRevision.cpp | 2 +- src/Common/Exception.cpp | 2 +- src/Common/config_version.cpp.in | 2 +- src/Coordination/FourLetterCommand.h | 2 +- src/Daemon/BaseDaemon.cpp | 2 +- src/Daemon/SentryWriter.cpp | 2 +- src/Functions/serverConstants.cpp | 2 +- src/IO/ReadWriteBufferFromHTTP.h | 2 +- src/Interpreters/ClientInfo.cpp | 3 ++- src/Interpreters/CrashLog.cpp | 2 +- src/Processors/Formats/Impl/Parquet/Write.cpp | 3 +-- src/Processors/QueryPlan/DistributedCreateLocalPlan.cpp | 2 +- src/Server/MySQLHandler.cpp | 3 +-- src/Server/PostgreSQLHandler.cpp | 3 +-- src/Server/TCPHandler.cpp | 6 +++--- src/Storages/Kafka/StorageKafka.cpp | 6 +++--- 21 files changed, 26 insertions(+), 31 deletions(-) diff --git a/programs/client/Client.cpp b/programs/client/Client.cpp index 723707d9ec3..4e1b79e7c59 100644 --- a/programs/client/Client.cpp +++ b/programs/client/Client.cpp @@ -17,18 +17,15 @@ #include "Core/Protocol.h" #include "Parsers/formatAST.h" -#include - #include -#include "config_version.h" +#include #include #include #include #include #include -#include #include #include diff --git a/programs/keeper/Keeper.cpp b/programs/keeper/Keeper.cpp index 8ebaf865cf4..6df1bbaa329 100644 --- a/programs/keeper/Keeper.cpp +++ b/programs/keeper/Keeper.cpp @@ -35,7 +35,7 @@ #include "Core/Defines.h" #include "config.h" -#include "config_version.h" +#include #include "config_tools.h" diff --git a/programs/server/Server.cpp b/programs/server/Server.cpp index 85ae6d7796c..9e974e796e0 100644 --- 
a/programs/server/Server.cpp +++ b/programs/server/Server.cpp @@ -98,7 +98,7 @@ #include #include "config.h" -#include "config_version.h" +#include #if defined(OS_LINUX) # include diff --git a/src/Client/ClientBase.cpp b/src/Client/ClientBase.cpp index 9c7bfe5974f..28c95c653be 100644 --- a/src/Client/ClientBase.cpp +++ b/src/Client/ClientBase.cpp @@ -77,7 +77,7 @@ #include #include -#include "config_version.h" +#include #include "config.h" namespace fs = std::filesystem; diff --git a/src/Client/Connection.cpp b/src/Client/Connection.cpp index 7af9c99f50c..75ca66f2647 100644 --- a/src/Client/Connection.cpp +++ b/src/Client/Connection.cpp @@ -35,7 +35,7 @@ #include #include -#include "config_version.h" +#include #include "config.h" #if USE_SSL diff --git a/src/Common/ClickHouseRevision.cpp b/src/Common/ClickHouseRevision.cpp index 9dd91159f28..c7c27436466 100644 --- a/src/Common/ClickHouseRevision.cpp +++ b/src/Common/ClickHouseRevision.cpp @@ -1,5 +1,5 @@ #include -#include "config_version.h" +#include namespace ClickHouseRevision { diff --git a/src/Common/Exception.cpp b/src/Common/Exception.cpp index d9f515b38b1..ed9fb00241d 100644 --- a/src/Common/Exception.cpp +++ b/src/Common/Exception.cpp @@ -21,7 +21,7 @@ #include #include -#include "config_version.h" +#include namespace fs = std::filesystem; diff --git a/src/Common/config_version.cpp.in b/src/Common/config_version.cpp.in index 26ae0f0f4d7..eb9ceb800b9 100644 --- a/src/Common/config_version.cpp.in +++ b/src/Common/config_version.cpp.in @@ -1,6 +1,6 @@ /// This file was autogenerated by CMake -#include "config_version.h" +#include const unsigned VERSION_REVISION = @VERSION_REVISION@; const char * VERSION_NAME = "@VERSION_NAME@"; diff --git a/src/Coordination/FourLetterCommand.h b/src/Coordination/FourLetterCommand.h index bb3c616e080..4702dd10415 100644 --- a/src/Coordination/FourLetterCommand.h +++ b/src/Coordination/FourLetterCommand.h @@ -7,7 +7,7 @@ #include #include -#include "config_version.h" +#include 
namespace DB diff --git a/src/Daemon/BaseDaemon.cpp b/src/Daemon/BaseDaemon.cpp index dba4b13835d..8833156386f 100644 --- a/src/Daemon/BaseDaemon.cpp +++ b/src/Daemon/BaseDaemon.cpp @@ -60,7 +60,7 @@ #include #include -#include "config_version.h" +#include #if defined(OS_DARWIN) # pragma clang diagnostic ignored "-Wunused-macros" diff --git a/src/Daemon/SentryWriter.cpp b/src/Daemon/SentryWriter.cpp index 81ab103be02..d6e7144ca3b 100644 --- a/src/Daemon/SentryWriter.cpp +++ b/src/Daemon/SentryWriter.cpp @@ -14,7 +14,7 @@ #include #include "config.h" -#include "config_version.h" +#include #if USE_SENTRY && !defined(CLICKHOUSE_KEEPER_STANDALONE_BUILD) diff --git a/src/Functions/serverConstants.cpp b/src/Functions/serverConstants.cpp index 4294f97d771..9f1a3584df8 100644 --- a/src/Functions/serverConstants.cpp +++ b/src/Functions/serverConstants.cpp @@ -12,7 +12,7 @@ #include -#include "config_version.h" +#include namespace DB diff --git a/src/IO/ReadWriteBufferFromHTTP.h b/src/IO/ReadWriteBufferFromHTTP.h index 7385a4c89a2..8f0e2388e5b 100644 --- a/src/IO/ReadWriteBufferFromHTTP.h +++ b/src/IO/ReadWriteBufferFromHTTP.h @@ -23,7 +23,7 @@ #include #include #include "config.h" -#include "config_version.h" +#include #include diff --git a/src/Interpreters/ClientInfo.cpp b/src/Interpreters/ClientInfo.cpp index b478daabc08..347ec115aba 100644 --- a/src/Interpreters/ClientInfo.cpp +++ b/src/Interpreters/ClientInfo.cpp @@ -7,10 +7,11 @@ #include #include -#include "config_version.h" +#include #include + namespace DB { diff --git a/src/Interpreters/CrashLog.cpp b/src/Interpreters/CrashLog.cpp index ec693eb7931..6b966445580 100644 --- a/src/Interpreters/CrashLog.cpp +++ b/src/Interpreters/CrashLog.cpp @@ -8,7 +8,7 @@ #include #include -#include "config_version.h" +#include namespace DB diff --git a/src/Processors/Formats/Impl/Parquet/Write.cpp b/src/Processors/Formats/Impl/Parquet/Write.cpp index 92d9df9b391..82e761f43e2 100644 --- 
a/src/Processors/Formats/Impl/Parquet/Write.cpp +++ b/src/Processors/Formats/Impl/Parquet/Write.cpp @@ -6,14 +6,13 @@ #include #include #include -#include #include #include #include #include #include #include -#include "config_version.h" +#include #if USE_SNAPPY #include diff --git a/src/Processors/QueryPlan/DistributedCreateLocalPlan.cpp b/src/Processors/QueryPlan/DistributedCreateLocalPlan.cpp index bacfb7e352e..4d4bc6220e9 100644 --- a/src/Processors/QueryPlan/DistributedCreateLocalPlan.cpp +++ b/src/Processors/QueryPlan/DistributedCreateLocalPlan.cpp @@ -1,6 +1,6 @@ #include -#include "config_version.h" +#include #include #include #include diff --git a/src/Server/MySQLHandler.cpp b/src/Server/MySQLHandler.cpp index 21fa7f7227a..10b520ca97a 100644 --- a/src/Server/MySQLHandler.cpp +++ b/src/Server/MySQLHandler.cpp @@ -25,8 +25,7 @@ #include #include #include - -#include "config_version.h" +#include #if USE_SSL # include diff --git a/src/Server/PostgreSQLHandler.cpp b/src/Server/PostgreSQLHandler.cpp index 3956f795657..eeb3784c1df 100644 --- a/src/Server/PostgreSQLHandler.cpp +++ b/src/Server/PostgreSQLHandler.cpp @@ -11,8 +11,7 @@ #include #include #include - -#include "config_version.h" +#include #if USE_SSL # include diff --git a/src/Server/TCPHandler.cpp b/src/Server/TCPHandler.cpp index 1da9806b4f5..f929d0f5ff9 100644 --- a/src/Server/TCPHandler.cpp +++ b/src/Server/TCPHandler.cpp @@ -59,11 +59,11 @@ # include #endif -#include "Core/Protocol.h" -#include "Storages/MergeTree/RequestResponse.h" +#include +#include #include "TCPHandler.h" -#include "config_version.h" +#include using namespace std::literals; using namespace DB; diff --git a/src/Storages/Kafka/StorageKafka.cpp b/src/Storages/Kafka/StorageKafka.cpp index 423d295cdf2..c17defca673 100644 --- a/src/Storages/Kafka/StorageKafka.cpp +++ b/src/Storages/Kafka/StorageKafka.cpp @@ -41,11 +41,11 @@ #include #include -#include "Storages/ColumnDefault.h" -#include "config_version.h" - +#include +#include 
#include #include + #if USE_KRB5 #include #endif // USE_KRB5 From 0f96df582f9d0e1458ea08daba517d37f617aa78 Mon Sep 17 00:00:00 2001 From: Alexey Milovidov Date: Mon, 13 Nov 2023 11:04:50 +0100 Subject: [PATCH 54/99] Remove the test --- ...ry_profiler_concurrency_overruns.reference | 1 - ...907_query_profiler_concurrency_overruns.sh | 21 ------------------- 2 files changed, 22 deletions(-) delete mode 100644 tests/queries/0_stateless/02907_query_profiler_concurrency_overruns.reference delete mode 100755 tests/queries/0_stateless/02907_query_profiler_concurrency_overruns.sh diff --git a/tests/queries/0_stateless/02907_query_profiler_concurrency_overruns.reference b/tests/queries/0_stateless/02907_query_profiler_concurrency_overruns.reference deleted file mode 100644 index 45d53fbec54..00000000000 --- a/tests/queries/0_stateless/02907_query_profiler_concurrency_overruns.reference +++ /dev/null @@ -1 +0,0 @@ -1000000 1 1 diff --git a/tests/queries/0_stateless/02907_query_profiler_concurrency_overruns.sh b/tests/queries/0_stateless/02907_query_profiler_concurrency_overruns.sh deleted file mode 100755 index 7c5e4209124..00000000000 --- a/tests/queries/0_stateless/02907_query_profiler_concurrency_overruns.sh +++ /dev/null @@ -1,21 +0,0 @@ -#!/usr/bin/env bash -# Tags: no-tsan, no-asan, no-ubsan, no-msan, no-debug, no-fasttest, no-cpu-aarch64 - -CUR_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd) -# shellcheck source=../shell_config.sh -. 
"$CUR_DIR"/../shell_config.sh - -# The check is probablistic, so make sure that it passes at least sometimes: - -while true -do - ${CLICKHOUSE_CLIENT} -n --query=" - SELECT count() FROM zeros_mt(1000000) SETTINGS - query_profiler_real_time_period_ns = 1000000, - query_profiler_cpu_time_period_ns = 1000000, - max_threads = 1000, - max_block_size = 100; - SELECT anyIf(value, event = 'QueryProfilerRuns') > 0, anyIf(value, event = 'QueryProfilerConcurrencyOverruns') > 0 FROM system.events; - " | tr '\t\n' ' ' | grep '1000000 1 1' && break - sleep 1 -done From 7864df48266e9db3f671e9cd09e81b96485debc3 Mon Sep 17 00:00:00 2001 From: Robert Schulze Date: Mon, 13 Nov 2023 10:56:26 +0000 Subject: [PATCH 55/99] Update docs + Try to stabilize test results, pt. II --- docs/en/operations/settings/settings.md | 6 +++--- .../02494_query_cache_nondeterministic_functions.sql | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/en/operations/settings/settings.md b/docs/en/operations/settings/settings.md index 67b62501dd9..d0acad7b557 100644 --- a/docs/en/operations/settings/settings.md +++ b/docs/en/operations/settings/settings.md @@ -1657,14 +1657,14 @@ Possible values: Default value: `1`. -## query_cache_non_deterministic_function_handling {#query-cache-nondeterministic-function-handling} +## query_cache_nondeterministic_function_handling {#query-cache-nondeterministic-function-handling} Controls how the [query cache](../query-cache.md) handles `SELECT` queries with non-deterministic functions like `rand()` or `now()`. Possible values: -- `'throw'` - Throw an exception. -- `'save'` - Cache the query result even if it is non-deterministic. +- `'throw'` - Throw an exception and don't cache the query result. +- `'save'` - Cache the query result. - `'ignore'` - Don't cache the query result and don't throw an exception. Default value: `throw`. 
diff --git a/tests/queries/0_stateless/02494_query_cache_nondeterministic_functions.sql b/tests/queries/0_stateless/02494_query_cache_nondeterministic_functions.sql index 1192a19e26b..477655e474f 100644 --- a/tests/queries/0_stateless/02494_query_cache_nondeterministic_functions.sql +++ b/tests/queries/0_stateless/02494_query_cache_nondeterministic_functions.sql @@ -1,5 +1,5 @@ --- Tag no-parallel: Messes with internal cache -- Tags: no-parallel +-- Tag no-parallel: Messes with internal cache SYSTEM DROP QUERY CACHE; From ed50cb61f87141da2ca81da32d49776616a3234c Mon Sep 17 00:00:00 2001 From: Alexey Milovidov Date: Mon, 13 Nov 2023 12:23:55 +0100 Subject: [PATCH 56/99] Fix style --- src/Common/config_version.h | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/Common/config_version.h b/src/Common/config_version.h index b1e0ea67a68..e3ec12e2b34 100644 --- a/src/Common/config_version.h +++ b/src/Common/config_version.h @@ -1,5 +1,3 @@ -/// This file was autogenerated by CMake - #pragma once /// These fields are changing only on every release, but we still don't want to have them in the header file, From dc6a61ee8fe212b1d465d92fed75d8de3221ef6d Mon Sep 17 00:00:00 2001 From: "Mikhail f. 
Shiryaev" Date: Thu, 9 Nov 2023 10:54:12 +0100 Subject: [PATCH 57/99] Add reusable installation workflow --- .github/workflows/reusable_install_test.yml | 64 +++++++++++++++++++++ 1 file changed, 64 insertions(+) create mode 100644 .github/workflows/reusable_install_test.yml diff --git a/.github/workflows/reusable_install_test.yml b/.github/workflows/reusable_install_test.yml new file mode 100644 index 00000000000..5d1a0509f64 --- /dev/null +++ b/.github/workflows/reusable_install_test.yml @@ -0,0 +1,64 @@ +### For the pure soul wishes to move it to another place +# https://github.com/orgs/community/discussions/9050 + +name: Test installation +'on': + workflow_call: + inputs: + test_name: + description: the value of test type from tests/ci/ci_config.py + required: true + type: string + checkout_depth: + description: the value of the git shallow checkout + required: false + type: number + default: 1 + runner_type: + description: the label of runner to use + default: style-checker + type: string + additional_envs: + description: additional ENV variables to setup the job + type: string + +jobs: + InstallCheck: + name: ${{inputs.test_name}} + runs-on: [self-hosted, '${{inputs.runner_type}}'] + steps: + - name: Check out repository code + uses: ClickHouse/checkout@v1 + with: + clear-repository: true + submodules: true + fetch-depth: ${{inputs.checkout_depth}} + filter: tree:0 + - name: Set build envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + REPORTS_PATH=${{runner.temp}}/reports_dir + GITHUB_JOB_OVERRIDDEN=${{inputs.test_name}} + CHECK_NAME=${{inputs.test_name}} + ${{inputs.additional_envs}} + EOF + - name: Common setup + uses: ./.github/actions/common_setup + with: + job_type: test_install + - name: Download json reports + uses: actions/download-artifact@v3 + with: + path: ${{ env.REPORTS_PATH }} + - name: Build + run: | + cd "$REPO_COPY/tests/ci" + python3 install_check.py "$CHECK_NAME" + - name: Upload build URLs to artifacts + if: ${{ success() || failure() }} + 
uses: actions/upload-artifact@v3 + with: + name: ${{ env.BUILD_URLS }} + path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json + - name: Clean + uses: ./.github/actions/clean From b1db60abc25e03b1bd80089242ac3d5683902857 Mon Sep 17 00:00:00 2001 From: "Mikhail f. Shiryaev" Date: Thu, 9 Nov 2023 10:55:16 +0100 Subject: [PATCH 58/99] Move IMAGES_PATH and REPORTS_PATH to the common_setup --- .github/actions/common_setup/action.yml | 2 ++ .github/workflows/reusable_build.yml | 1 - .github/workflows/reusable_install_test.yml | 1 - 3 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/actions/common_setup/action.yml b/.github/actions/common_setup/action.yml index 0d31945087d..b02413adc44 100644 --- a/.github/actions/common_setup/action.yml +++ b/.github/actions/common_setup/action.yml @@ -19,6 +19,8 @@ runs: cat >> "$GITHUB_ENV" << 'EOF' TEMP_PATH=${{runner.temp}}/${{inputs.job_type}} REPO_COPY=${{runner.temp}}/${{inputs.job_type}}/git-repo-copy + IMAGES_PATH=${{runner.temp}}/images_path + REPORTS_PATH=${{runner.temp}}/reports_dir EOF if [ -z "${{env.GITHUB_JOB_OVERRIDDEN}}" ] && [ "true" == "${{inputs.nested_job}}" ]; then echo "The GITHUB_JOB_OVERRIDDEN ENV is unset, and must be set for the nested jobs" diff --git a/.github/workflows/reusable_build.yml b/.github/workflows/reusable_build.yml index 1eb25307f0c..f6586016874 100644 --- a/.github/workflows/reusable_build.yml +++ b/.github/workflows/reusable_build.yml @@ -37,7 +37,6 @@ jobs: - name: Set build envs run: | cat >> "$GITHUB_ENV" << 'EOF' - IMAGES_PATH=${{runner.temp}}/images_path GITHUB_JOB_OVERRIDDEN=Build-${{inputs.build_name}} ${{inputs.additional_envs}} EOF diff --git a/.github/workflows/reusable_install_test.yml b/.github/workflows/reusable_install_test.yml index 5d1a0509f64..d84bf5d34f7 100644 --- a/.github/workflows/reusable_install_test.yml +++ b/.github/workflows/reusable_install_test.yml @@ -37,7 +37,6 @@ jobs: - name: Set build envs run: | cat >> "$GITHUB_ENV" << 'EOF' - 
REPORTS_PATH=${{runner.temp}}/reports_dir GITHUB_JOB_OVERRIDDEN=${{inputs.test_name}} CHECK_NAME=${{inputs.test_name}} ${{inputs.additional_envs}} From 45e12bef4ef19048dd004d98f2501939c183d4ea Mon Sep 17 00:00:00 2001 From: "Mikhail f. Shiryaev" Date: Thu, 9 Nov 2023 10:59:29 +0100 Subject: [PATCH 59/99] Use reusable_install_test.yml --- .github/workflows/backport_branches.yml | 70 +++---------------------- .github/workflows/master.yml | 70 +++---------------------- .github/workflows/pull_request.yml | 70 +++---------------------- .github/workflows/release_branches.yml | 70 +++---------------------- 4 files changed, 32 insertions(+), 248 deletions(-) diff --git a/.github/workflows/backport_branches.yml b/.github/workflows/backport_branches.yml index 7611c5429c5..9bd55c51438 100644 --- a/.github/workflows/backport_branches.yml +++ b/.github/workflows/backport_branches.yml @@ -322,70 +322,16 @@ jobs: ############################################################################################ InstallPackagesTestRelease: needs: [BuilderDebRelease] - runs-on: [self-hosted, style-checker] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/test_install - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Install packages (amd64) - REPO_COPY=${{runner.temp}}/test_install/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Test packages installation - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 install_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: 
./.github/workflows/reusable_install_test.yml + with: + test_name: Install packages (amd64) + runner_type: style-checker InstallPackagesTestAarch64: needs: [BuilderDebAarch64] - runs-on: [self-hosted, style-checker-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/test_install - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Install packages (arm64) - REPO_COPY=${{runner.temp}}/test_install/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Test packages installation - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 install_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_install_test.yml + with: + test_name: Install packages (arm64) + runner_type: style-checker-aarch64 ############################################################################################## ########################### FUNCTIONAl STATELESS TESTS ####################################### ############################################################################################## diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml index 5a0fc2fabcb..daac593b20c 100644 --- a/.github/workflows/master.yml +++ b/.github/workflows/master.yml @@ -431,70 +431,16 @@ jobs: ############################################################################################ InstallPackagesTestRelease: needs: [BuilderDebRelease] - runs-on: [self-hosted, style-checker] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - 
TEMP_PATH=${{runner.temp}}/test_install - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Install packages (amd64) - REPO_COPY=${{runner.temp}}/test_install/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Test packages installation - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 install_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_install_test.yml + with: + test_name: Install packages (amd64) + runner_type: style-checker InstallPackagesTestAarch64: needs: [BuilderDebAarch64] - runs-on: [self-hosted, style-checker-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/test_install - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Install packages (arm64) - REPO_COPY=${{runner.temp}}/test_install/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Test packages installation - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 install_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_install_test.yml + with: + test_name: Install packages (arm64) + 
runner_type: style-checker-aarch64 ############################################################################################## ########################### FUNCTIONAl STATELESS TESTS ####################################### ############################################################################################## diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index 66a0b186743..0b4cbc826f5 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -467,70 +467,16 @@ jobs: ############################################################################################ InstallPackagesTestRelease: needs: [BuilderDebRelease] - runs-on: [self-hosted, style-checker] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/test_install - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Install packages (amd64) - REPO_COPY=${{runner.temp}}/test_install/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Test packages installation - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 install_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_install_test.yml + with: + test_name: Install packages (amd64) + runner_type: style-checker InstallPackagesTestAarch64: needs: [BuilderDebAarch64] - runs-on: [self-hosted, style-checker-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/test_install - REPORTS_PATH=${{runner.temp}}/reports_dir - 
CHECK_NAME=Install packages (arm64) - REPO_COPY=${{runner.temp}}/test_install/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Test packages installation - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 install_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_install_test.yml + with: + test_name: Install packages (arm64) + runner_type: style-checker-aarch64 ############################################################################################## ########################### FUNCTIONAl STATELESS TESTS ####################################### ############################################################################################## diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 29776d0aa5c..6e890149e48 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -322,70 +322,16 @@ jobs: ############################################################################################ InstallPackagesTestRelease: needs: [BuilderDebRelease] - runs-on: [self-hosted, style-checker] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/test_install - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Install packages (amd64) - REPO_COPY=${{runner.temp}}/test_install/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - 
with: - clear-repository: true - - name: Test packages installation - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 install_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_install_test.yml + with: + test_name: Install packages (amd64) + runner_type: style-checker InstallPackagesTestAarch64: needs: [BuilderDebAarch64] - runs-on: [self-hosted, style-checker-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/test_install - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Install packages (arm64) - REPO_COPY=${{runner.temp}}/test_install/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Test packages installation - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 install_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_install_test.yml + with: + test_name: Install packages (arm64) + runner_type: style-checker-aarch64 ############################################################################################## ########################### FUNCTIONAl STATELESS TESTS ####################################### ############################################################################################## From 
2aec201c78236d296f3c2f0853e18cc4794489b2 Mon Sep 17 00:00:00 2001 From: "Mikhail f. Shiryaev" Date: Thu, 9 Nov 2023 12:21:47 +0100 Subject: [PATCH 60/99] Rewrite reusable_install_test to universal reusable_test --- .github/workflows/backport_branches.yml | 10 ++- .github/workflows/master.yml | 10 ++- .github/workflows/pull_request.yml | 10 ++- .github/workflows/release_branches.yml | 10 ++- .github/workflows/reusable_install_test.yml | 63 -------------- .github/workflows/reusable_test.yml | 93 +++++++++++++++++++++ 6 files changed, 125 insertions(+), 71 deletions(-) delete mode 100644 .github/workflows/reusable_install_test.yml create mode 100644 .github/workflows/reusable_test.yml diff --git a/.github/workflows/backport_branches.yml b/.github/workflows/backport_branches.yml index 9bd55c51438..e93bb83b081 100644 --- a/.github/workflows/backport_branches.yml +++ b/.github/workflows/backport_branches.yml @@ -322,16 +322,22 @@ jobs: ############################################################################################ InstallPackagesTestRelease: needs: [BuilderDebRelease] - uses: ./.github/workflows/reusable_install_test.yml + uses: ./.github/workflows/reusable_test.yml with: test_name: Install packages (amd64) runner_type: style-checker + run_command: | + cd "$REPO_COPY/tests/ci" + python3 install_check.py "$CHECK_NAME" InstallPackagesTestAarch64: needs: [BuilderDebAarch64] - uses: ./.github/workflows/reusable_install_test.yml + uses: ./.github/workflows/reusable_test.yml with: test_name: Install packages (arm64) runner_type: style-checker-aarch64 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 install_check.py "$CHECK_NAME" ############################################################################################## ########################### FUNCTIONAl STATELESS TESTS ####################################### ############################################################################################## diff --git a/.github/workflows/master.yml 
b/.github/workflows/master.yml index daac593b20c..6cd94e9aa42 100644 --- a/.github/workflows/master.yml +++ b/.github/workflows/master.yml @@ -431,16 +431,22 @@ jobs: ############################################################################################ InstallPackagesTestRelease: needs: [BuilderDebRelease] - uses: ./.github/workflows/reusable_install_test.yml + uses: ./.github/workflows/reusable_test.yml with: test_name: Install packages (amd64) runner_type: style-checker + run_command: | + cd "$REPO_COPY/tests/ci" + python3 install_check.py "$CHECK_NAME" InstallPackagesTestAarch64: needs: [BuilderDebAarch64] - uses: ./.github/workflows/reusable_install_test.yml + uses: ./.github/workflows/reusable_test.yml with: test_name: Install packages (arm64) runner_type: style-checker-aarch64 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 install_check.py "$CHECK_NAME" ############################################################################################## ########################### FUNCTIONAl STATELESS TESTS ####################################### ############################################################################################## diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index 0b4cbc826f5..711e524acc5 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -467,16 +467,22 @@ jobs: ############################################################################################ InstallPackagesTestRelease: needs: [BuilderDebRelease] - uses: ./.github/workflows/reusable_install_test.yml + uses: ./.github/workflows/reusable_test.yml with: test_name: Install packages (amd64) runner_type: style-checker + run_command: | + cd "$REPO_COPY/tests/ci" + python3 install_check.py "$CHECK_NAME" InstallPackagesTestAarch64: needs: [BuilderDebAarch64] - uses: ./.github/workflows/reusable_install_test.yml + uses: ./.github/workflows/reusable_test.yml with: test_name: Install packages 
(arm64) runner_type: style-checker-aarch64 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 install_check.py "$CHECK_NAME" ############################################################################################## ########################### FUNCTIONAl STATELESS TESTS ####################################### ############################################################################################## diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 6e890149e48..5e8530bcaae 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -322,16 +322,22 @@ jobs: ############################################################################################ InstallPackagesTestRelease: needs: [BuilderDebRelease] - uses: ./.github/workflows/reusable_install_test.yml + uses: ./.github/workflows/reusable_test.yml with: test_name: Install packages (amd64) runner_type: style-checker + run_command: | + cd "$REPO_COPY/tests/ci" + python3 install_check.py "$CHECK_NAME" InstallPackagesTestAarch64: needs: [BuilderDebAarch64] - uses: ./.github/workflows/reusable_install_test.yml + uses: ./.github/workflows/reusable_test.yml with: test_name: Install packages (arm64) runner_type: style-checker-aarch64 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 install_check.py "$CHECK_NAME" ############################################################################################## ########################### FUNCTIONAl STATELESS TESTS ####################################### ############################################################################################## diff --git a/.github/workflows/reusable_install_test.yml b/.github/workflows/reusable_install_test.yml deleted file mode 100644 index d84bf5d34f7..00000000000 --- a/.github/workflows/reusable_install_test.yml +++ /dev/null @@ -1,63 +0,0 @@ -### For the pure soul wishes to move it to another place -# 
https://github.com/orgs/community/discussions/9050 - -name: Test installation -'on': - workflow_call: - inputs: - test_name: - description: the value of test type from tests/ci/ci_config.py - required: true - type: string - checkout_depth: - description: the value of the git shallow checkout - required: false - type: number - default: 1 - runner_type: - description: the label of runner to use - default: style-checker - type: string - additional_envs: - description: additional ENV variables to setup the job - type: string - -jobs: - InstallCheck: - name: ${{inputs.test_name}} - runs-on: [self-hosted, '${{inputs.runner_type}}'] - steps: - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - fetch-depth: ${{inputs.checkout_depth}} - filter: tree:0 - - name: Set build envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - GITHUB_JOB_OVERRIDDEN=${{inputs.test_name}} - CHECK_NAME=${{inputs.test_name}} - ${{inputs.additional_envs}} - EOF - - name: Common setup - uses: ./.github/actions/common_setup - with: - job_type: test_install - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Build - run: | - cd "$REPO_COPY/tests/ci" - python3 install_check.py "$CHECK_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Clean - uses: ./.github/actions/clean diff --git a/.github/workflows/reusable_test.yml b/.github/workflows/reusable_test.yml new file mode 100644 index 00000000000..19939d114f2 --- /dev/null +++ b/.github/workflows/reusable_test.yml @@ -0,0 +1,93 @@ +### For the pure soul wishes to move it to another place +# https://github.com/orgs/community/discussions/9050 + +name: Testing workflow +'on': + workflow_call: + inputs: + test_name: + description: the value of test type from 
tests/ci/ci_config.py, ends up as $CHECK_NAME ENV + required: true + type: string + runner_type: + description: the label of runner to use + required: true + type: string + run_command: + description: the command to launch the check. Usually starts with `cd '$REPO_COPY/tests/ci'` + required: true + type: string + batches: + description: how many batches for the test will be launched + default: 1 + type: number + checkout_depth: + description: the value of the git shallow checkout + required: false + type: number + default: 1 + submodules: + description: if the submodules should be checked out + required: false + type: boolean + default: false + additional_envs: + description: additional ENV variables to setup the job + type: string + +env: + CHECK_NAME: ${{inputs.test_name}} + +jobs: + PrepareStrategy: + if: ${{inputs.batches > 0}} # batches < 1 is misconfiguration + runs-on: [self-hosted, style-checker-aarch64] + outputs: + batches: ${{steps.batches.outputs.batches}} + steps: + - name: Calculate batches + id: batches + run: | + batches_output=$(python3 -c 'import json; print(json.dumps(list(range(${{inputs.batches}}))))') + echo "batches=${batches_output}" >> "$GITHUB_OUTPUT" + Test: + name: ${{inputs.test_name}}-${{matrix.batch}} + runs-on: [self-hosted, '${{inputs.runner_type}}'] + needs: [PrepareStrategy] + strategy: + fail-fast: false # we always wait for entire matrix + matrix: + batch: ${{ fromJson(needs.PrepareStrategy.outputs.batches) }} + steps: + - name: Check out repository code + uses: ClickHouse/checkout@v1 + with: + clear-repository: true + submodules: ${{inputs.submodules}} + fetch-depth: ${{inputs.checkout_depth}} + filter: tree:0 + - name: Set build envs + run: | + cat >> "$GITHUB_ENV" << 'EOF' + GITHUB_JOB_OVERRIDDEN=${{inputs.test_name}}-${{matrix.batch}} + ${{inputs.additional_envs}} + EOF + - name: Common setup + uses: ./.github/actions/common_setup + with: + job_type: test + - name: Download json reports + uses: actions/download-artifact@v3 + 
with: + path: ${{ env.REPORTS_PATH }} + - name: Setup batch + if: ${{ inputs.batches > 1}} + run: | + cat >> "$GITHUB_ENV" << 'EOF' + RUN_BY_HASH_NUM=${{matrix.batch}} + RUN_BY_HASH_TOTAL=${{inputs.batches}} + EOF + - name: Run test + run: ${{inputs.run_command}} + - name: Clean + uses: ./.github/actions/clean From b69a2608c472e7359dd580dfcdadfdcf4ae10033 Mon Sep 17 00:00:00 2001 From: "Mikhail f. Shiryaev" Date: Thu, 9 Nov 2023 16:04:18 +0100 Subject: [PATCH 61/99] Rewrite functional test to use reusable_test.yml --- .github/workflows/backport_branches.yml | 84 +- .github/workflows/master.yml | 1576 ++------------- .github/workflows/pull_request.yml | 2368 +++-------------------- .github/workflows/release_branches.yml | 871 ++------- 4 files changed, 676 insertions(+), 4223 deletions(-) diff --git a/.github/workflows/backport_branches.yml b/.github/workflows/backport_branches.yml index e93bb83b081..02a5ee00e4c 100644 --- a/.github/workflows/backport_branches.yml +++ b/.github/workflows/backport_branches.yml @@ -343,75 +343,31 @@ jobs: ############################################################################################## FunctionalStatelessTestAsan: needs: [BuilderDebAsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (asan) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps 
--quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (asan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" ############################################################################################## ############################ FUNCTIONAl STATEFUL TESTS ####################################### ############################################################################################## FunctionalStatefulTestDebug: needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (debug) - REPO_COPY=${{runner.temp}}/stateful_debug/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (debug) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" 
############################################################################################## ######################################### STRESS TESTS ####################################### ############################################################################################## diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml index 6cd94e9aa42..77d9ac58b06 100644 --- a/.github/workflows/master.yml +++ b/.github/workflows/master.yml @@ -452,1389 +452,223 @@ jobs: ############################################################################################## FunctionalStatelessTestRelease: needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release) - REPO_COPY=${{runner.temp}}/stateless_release/ClickHouse - KILL_TIMEOUT=10800 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (release) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatelessTestReleaseDatabaseOrdinary: needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - 
steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_release_database_ordinary - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release, DatabaseOrdinary) - REPO_COPY=${{runner.temp}}/stateless_release_database_ordinary/ClickHouse - KILL_TIMEOUT=10800 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestReleaseDatabaseReplicated0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (release, DatabaseOrdinary) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" + FunctionalStatelessTestReleaseDatabaseReplicated: needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_database_replicated - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release, DatabaseReplicated) - REPO_COPY=${{runner.temp}}/stateless_database_replicated/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - 
clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestReleaseDatabaseReplicated1: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (release, DatabaseReplicated) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 4 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" + FunctionalStatelessTestReleaseS3: needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_database_replicated - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release, DatabaseReplicated) - REPO_COPY=${{runner.temp}}/stateless_database_replicated/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestReleaseDatabaseReplicated2: - needs: [BuilderDebRelease] - runs-on: 
[self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_database_replicated - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release, DatabaseReplicated) - REPO_COPY=${{runner.temp}}/stateless_database_replicated/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestReleaseDatabaseReplicated3: - needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_database_replicated - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release, DatabaseReplicated) - REPO_COPY=${{runner.temp}}/stateless_database_replicated/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps 
--quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestReleaseS3_0: - needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_s3_storage - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release, s3 storage) - REPO_COPY=${{runner.temp}}/stateless_s3_storage/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=2 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestReleaseS3_1: - needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_s3_storage - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release, s3 storage) - REPO_COPY=${{runner.temp}}/stateless_s3_storage/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=2 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" 
"$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (release, s3 storage) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 2 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatelessTestReleaseAnalyzer: needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_analyzer - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release, analyzer) - REPO_COPY=${{runner.temp}}/stateless_analyzer/ClickHouse - KILL_TIMEOUT=10800 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (release, analyzer) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatelessTestAarch64: needs: [BuilderDebAarch64] - runs-on: 
[self-hosted, func-tester-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (aarch64) - REPO_COPY=${{runner.temp}}/stateless_release/ClickHouse - KILL_TIMEOUT=10800 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestAsan0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (aarch64) + runner_type: func-tester-aarch64 + additional_envs: | + KILL_TIMEOUT=10800 + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" + FunctionalStatelessTestAsan: needs: [BuilderDebAsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (asan) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r 
"$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestAsan1: - needs: [BuilderDebAsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (asan) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestAsan2: - needs: [BuilderDebAsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (asan) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - 
name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestAsan3: - needs: [BuilderDebAsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (asan) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestTsan0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (asan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 4 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" + FunctionalStatelessTestTsan: needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_tsan - 
REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan) - REPO_COPY=${{runner.temp}}/stateless_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestTsan1: - needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan) - REPO_COPY=${{runner.temp}}/stateless_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestTsan2: - needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - 
run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan) - REPO_COPY=${{runner.temp}}/stateless_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestTsan3: - needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan) - REPO_COPY=${{runner.temp}}/stateless_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestTsan4: - needs: 
[BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan) - REPO_COPY=${{runner.temp}}/stateless_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=4 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestUBsan0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (tsan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 5 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" + FunctionalStatelessTestUBsan: needs: [BuilderDebUBsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_ubsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (ubsan) - REPO_COPY=${{runner.temp}}/stateless_ubsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=2 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" 
- mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestUBsan1: - needs: [BuilderDebUBsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_ubsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (ubsan) - REPO_COPY=${{runner.temp}}/stateless_ubsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=2 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestMsan0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (ubsan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 2 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" + FunctionalStatelessTestMsan: needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests 
(msan) - REPO_COPY=${{runner.temp}}/stateless_memory/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestMsan1: - needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (msan) - REPO_COPY=${{runner.temp}}/stateless_memory/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestMsan2: - needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - 
TEMP_PATH=${{runner.temp}}/stateless_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (msan) - REPO_COPY=${{runner.temp}}/stateless_memory/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestMsan3: - needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (msan) - REPO_COPY=${{runner.temp}}/stateless_memory/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestMsan4: - needs: [BuilderDebMsan] - runs-on: 
[self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (msan) - REPO_COPY=${{runner.temp}}/stateless_memory/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=4 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestMsan5: - needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (msan) - REPO_COPY=${{runner.temp}}/stateless_memory/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=5 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - 
sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestDebug0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (msan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 6 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" + FunctionalStatelessTestDebug: needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestDebug1: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - 
clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestDebug2: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestDebug3: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ 
env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestDebug4: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=4 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (debug) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 5 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" ############################################################################################## ############################ 
FUNCTIONAl STATEFUL TESTS ####################################### ############################################################################################## FunctionalStatefulTestRelease: needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (release) - REPO_COPY=${{runner.temp}}/stateful_release/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (release) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatefulTestAarch64: needs: [BuilderDebAarch64] - runs-on: [self-hosted, func-tester-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (aarch64) - REPO_COPY=${{runner.temp}}/stateful_release/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: 
ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (aarch64) + runner_type: func-tester-aarch64 + additional_envs: | + KILL_TIMEOUT=3600 + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatefulTestAsan: needs: [BuilderDebAsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (asan) - REPO_COPY=${{runner.temp}}/stateful_debug/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (asan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 
functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatefulTestTsan: needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (tsan) - REPO_COPY=${{runner.temp}}/stateful_tsan/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (tsan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatefulTestMsan: needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_msan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (msan) - REPO_COPY=${{runner.temp}}/stateful_msan/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r 
"$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (msan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatefulTestUBsan: needs: [BuilderDebUBsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_ubsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (ubsan) - REPO_COPY=${{runner.temp}}/stateful_ubsan/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (ubsan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatefulTestDebug: needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - 
run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (debug) - REPO_COPY=${{runner.temp}}/stateful_debug/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (debug) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" ############################################################################################## ######################################### STRESS TESTS ####################################### ############################################################################################## @@ -3474,39 +2308,19 @@ jobs: - BuilderReport - BuilderSpecialReport - MarkReleaseReady - - FunctionalStatelessTestDebug0 - - FunctionalStatelessTestDebug1 - - FunctionalStatelessTestDebug2 - - FunctionalStatelessTestDebug3 - - FunctionalStatelessTestDebug4 + - FunctionalStatelessTestDebug - FunctionalStatelessTestRelease - FunctionalStatelessTestReleaseDatabaseOrdinary - - FunctionalStatelessTestReleaseDatabaseReplicated0 - - FunctionalStatelessTestReleaseDatabaseReplicated1 - - FunctionalStatelessTestReleaseDatabaseReplicated2 - - 
FunctionalStatelessTestReleaseDatabaseReplicated3 + - FunctionalStatelessTestReleaseDatabaseReplicated + - FunctionalStatelessTestReleaseAnalyzer + - FunctionalStatelessTestReleaseS3 - FunctionalStatelessTestAarch64 - - FunctionalStatelessTestAsan0 - - FunctionalStatelessTestAsan1 - - FunctionalStatelessTestAsan2 - - FunctionalStatelessTestAsan3 - - FunctionalStatelessTestTsan0 - - FunctionalStatelessTestTsan1 - - FunctionalStatelessTestTsan2 - - FunctionalStatelessTestTsan3 - - FunctionalStatelessTestTsan4 - - FunctionalStatelessTestMsan0 - - FunctionalStatelessTestMsan1 - - FunctionalStatelessTestMsan2 - - FunctionalStatelessTestMsan3 - - FunctionalStatelessTestMsan4 - - FunctionalStatelessTestMsan5 - - FunctionalStatelessTestUBsan0 - - FunctionalStatelessTestUBsan1 + - FunctionalStatelessTestAsan + - FunctionalStatelessTestTsan + - FunctionalStatelessTestMsan + - FunctionalStatelessTestUBsan - FunctionalStatefulTestDebug - FunctionalStatefulTestRelease - - FunctionalStatelessTestReleaseS3_0 - - FunctionalStatelessTestReleaseS3_1 - FunctionalStatefulTestAarch64 - FunctionalStatefulTestAsan - FunctionalStatefulTestTsan diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index 711e524acc5..e8c31c830cc 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -488,2075 +488,355 @@ jobs: ############################################################################################## FunctionalStatelessTestRelease: needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release) - REPO_COPY=${{runner.temp}}/stateless_release/ClickHouse - KILL_TIMEOUT=10800 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - 
uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestReleaseDatabaseReplicated0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (release) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" + FunctionalStatelessTestReleaseDatabaseReplicated: needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_database_replicated - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release, DatabaseReplicated) - REPO_COPY=${{runner.temp}}/stateless_database_replicated/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestReleaseDatabaseReplicated1: - needs: 
[BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_database_replicated - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release, DatabaseReplicated) - REPO_COPY=${{runner.temp}}/stateless_database_replicated/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestReleaseDatabaseReplicated2: - needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_database_replicated - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release, DatabaseReplicated) - REPO_COPY=${{runner.temp}}/stateless_database_replicated/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: 
always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestReleaseDatabaseReplicated3: - needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_database_replicated - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release, DatabaseReplicated) - REPO_COPY=${{runner.temp}}/stateless_database_replicated/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (release, DatabaseReplicated) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 4 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatelessTestReleaseWideParts: needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_wide_parts - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release, wide parts enabled) - REPO_COPY=${{runner.temp}}/stateless_wide_parts/ClickHouse - 
KILL_TIMEOUT=10800 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (release, wide parts enabled) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatelessTestReleaseAnalyzer: needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_analyzer - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release, analyzer) - REPO_COPY=${{runner.temp}}/stateless_analyzer/ClickHouse - KILL_TIMEOUT=10800 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - 
FunctionalStatelessTestReleaseS3_0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (release, analyzer) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" + FunctionalStatelessTestReleaseS3: needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_s3_storage - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release, s3 storage) - REPO_COPY=${{runner.temp}}/stateless_s3_storage/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=2 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestReleaseS3_1: - needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_s3_storage - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (release, s3 storage) - REPO_COPY=${{runner.temp}}/stateless_s3_storage/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=2 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - 
uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestS3Debug0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (release, s3 storage) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 2 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" + FunctionalStatelessTestS3Debug: needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_s3_storage_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug, s3 storage) - REPO_COPY=${{runner.temp}}/stateless_s3_storage_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestS3Debug1: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: 
- - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_s3_storage_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug, s3 storage) - REPO_COPY=${{runner.temp}}/stateless_s3_storage_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestS3Debug2: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_s3_storage_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug, s3 storage) - REPO_COPY=${{runner.temp}}/stateless_s3_storage_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs 
--no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestS3Debug3: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_s3_storage_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug, s3 storage) - REPO_COPY=${{runner.temp}}/stateless_s3_storage_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestS3Debug4: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_s3_storage_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug, s3 storage) - REPO_COPY=${{runner.temp}}/stateless_s3_storage_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=4 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py 
"$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestS3Debug5: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_s3_storage_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug, s3 storage) - REPO_COPY=${{runner.temp}}/stateless_s3_storage_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=5 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestS3Tsan0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (debug, s3 storage) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 6 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" + FunctionalStatelessTestS3Tsan: needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_s3_storage_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan, s3 storage) - 
REPO_COPY=${{runner.temp}}/stateless_s3_storage_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestS3Tsan1: - needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_s3_storage_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan, s3 storage) - REPO_COPY=${{runner.temp}}/stateless_s3_storage_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestS3Tsan2: - needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 
'EOF' - TEMP_PATH=${{runner.temp}}/stateless_s3_storage_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan, s3 storage) - REPO_COPY=${{runner.temp}}/stateless_s3_storage_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestS3Tsan3: - needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_s3_storage_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan, s3 storage) - REPO_COPY=${{runner.temp}}/stateless_s3_storage_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - 
FunctionalStatelessTestS3Tsan4: - needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_s3_storage_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan, s3 storage) - REPO_COPY=${{runner.temp}}/stateless_s3_storage_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=4 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (tsan, s3 storage) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 5 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatelessTestAarch64: needs: [BuilderDebAarch64] - runs-on: [self-hosted, func-tester-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (aarch64) - REPO_COPY=${{runner.temp}}/stateless_release/ClickHouse - KILL_TIMEOUT=10800 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: 
| - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestAsan0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (aarch64) + runner_type: func-tester-aarch64 + additional_envs: | + KILL_TIMEOUT=10800 + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" + FunctionalStatelessTestAsan: needs: [BuilderDebAsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (asan) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestAsan1: - needs: [BuilderDebAsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - 
REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (asan) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestAsan2: - needs: [BuilderDebAsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (asan) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestAsan3: - needs: [BuilderDebAsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - 
run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (asan) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestTsan0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (asan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 4 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" + FunctionalStatelessTestTsan: needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan) - REPO_COPY=${{runner.temp}}/stateless_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd 
"$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestTsan1: - needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan) - REPO_COPY=${{runner.temp}}/stateless_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestTsan2: - needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan) - REPO_COPY=${{runner.temp}}/stateless_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr 
"$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestTsan3: - needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan) - REPO_COPY=${{runner.temp}}/stateless_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestTsan4: - needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan) - REPO_COPY=${{runner.temp}}/stateless_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=4 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 
- with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestUBsan0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (tsan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 5 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" + FunctionalStatelessTestMsan: + needs: [BuilderDebMsan] + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (msan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 6 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" + FunctionalStatelessTestUBsan: needs: [BuilderDebUBsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_ubsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (ubsan) - REPO_COPY=${{runner.temp}}/stateless_ubsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=2 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker 
ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestUBsan1: - needs: [BuilderDebUBsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_ubsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (ubsan) - REPO_COPY=${{runner.temp}}/stateless_ubsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=2 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestMsan0: - needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (msan) - REPO_COPY=${{runner.temp}}/stateless_memory/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 
functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestMsan1: - needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (msan) - REPO_COPY=${{runner.temp}}/stateless_memory/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestMsan2: - needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (msan) - REPO_COPY=${{runner.temp}}/stateless_memory/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p 
"$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestMsan3: - needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (msan) - REPO_COPY=${{runner.temp}}/stateless_memory/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestMsan4: - needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (msan) - REPO_COPY=${{runner.temp}}/stateless_memory/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=4 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - 
clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestMsan5: - needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (msan) - REPO_COPY=${{runner.temp}}/stateless_memory/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=5 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestDebug0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (ubsan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 2 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" + FunctionalStatelessTestDebug: needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - 
TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestDebug1: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestDebug2: - needs: [BuilderDebDebug] - runs-on: 
[self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestDebug3: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - 
sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestDebug4: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=4 - RUN_BY_HASH_TOTAL=5 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (debug) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 5 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatelessTestFlakyCheck: needs: [BuilderDebAsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_flaky_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests flaky check (asan) - REPO_COPY=${{runner.temp}}/stateless_flaky_asan/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo 
rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests flaky check (asan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" TestsBugfixCheck: needs: [CheckLabels, StyleCheck] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/tests_bugfix_check - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=tests bugfix validate check - KILL_TIMEOUT=3600 - REPO_COPY=${{runner.temp}}/tests_bugfix_check/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Bugfix test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: tests bugfix validate check + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" - TEMP_PATH="${TEMP_PATH}/integration" \ - REPORTS_PATH="${REPORTS_PATH}/integration" \ - python3 integration_test_check.py "Integration $CHECK_NAME" \ - --validate-bugfix --post-commit-status=file || echo 'ignore exit code' + TEMP_PATH="${TEMP_PATH}/integration" \ + REPORTS_PATH="${REPORTS_PATH}/integration" \ + python3 integration_test_check.py "Integration $CHECK_NAME" 
\ + --validate-bugfix --post-commit-status=file || echo 'ignore exit code' - TEMP_PATH="${TEMP_PATH}/stateless" \ - REPORTS_PATH="${REPORTS_PATH}/stateless" \ - python3 functional_test_check.py "Stateless $CHECK_NAME" "$KILL_TIMEOUT" \ - --validate-bugfix --post-commit-status=file || echo 'ignore exit code' + TEMP_PATH="${TEMP_PATH}/stateless" \ + REPORTS_PATH="${REPORTS_PATH}/stateless" \ + python3 functional_test_check.py "Stateless $CHECK_NAME" "$KILL_TIMEOUT" \ + --validate-bugfix --post-commit-status=file || echo 'ignore exit code' - python3 bugfix_validate_check.py "${TEMP_PATH}/stateless/functional_commit_status.tsv" "${TEMP_PATH}/integration/integration_commit_status.tsv" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + python3 bugfix_validate_check.py "${TEMP_PATH}/stateless/functional_commit_status.tsv" "${TEMP_PATH}/integration/integration_commit_status.tsv" ############################################################################################## ############################ FUNCTIONAl STATEFUL TESTS ####################################### ############################################################################################## FunctionalStatefulTestRelease: needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (release) - REPO_COPY=${{runner.temp}}/stateful_release/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r 
"$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (release) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatefulTestAarch64: needs: [BuilderDebAarch64] - runs-on: [self-hosted, func-tester-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (aarch64) - REPO_COPY=${{runner.temp}}/stateful_release/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (aarch64) + runner_type: func-tester-aarch64 + additional_envs: | + KILL_TIMEOUT=3600 + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatefulTestAsan: needs: [BuilderDebAsan] - runs-on: [self-hosted, func-tester] 
- steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (asan) - REPO_COPY=${{runner.temp}}/stateful_debug/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (asan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatefulTestTsan: needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (tsan) - REPO_COPY=${{runner.temp}}/stateful_tsan/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: 
always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (tsan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatefulTestMsan: needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_msan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (msan) - REPO_COPY=${{runner.temp}}/stateful_msan/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (msan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatefulTestUBsan: needs: [BuilderDebUBsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_ubsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests 
(ubsan) - REPO_COPY=${{runner.temp}}/stateful_ubsan/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (ubsan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatefulTestDebug: needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (debug) - REPO_COPY=${{runner.temp}}/stateful_debug/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: 
./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (debug) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" # Parallel replicas FunctionalStatefulTestDebugParallelReplicas: needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (debug, ParallelReplicas) - REPO_COPY=${{runner.temp}}/stateful_debug/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v2 - with: - path: ${{ env.REPORTS_PATH }} - - name: Clear repository - run: | - sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" - - name: Check out repository code - uses: actions/checkout@v2 - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (debug, ParallelReplicas) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatefulTestUBsanParallelReplicas: needs: [BuilderDebUBsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_ubsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (ubsan, ParallelReplicas) 
- REPO_COPY=${{runner.temp}}/stateful_ubsan/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v2 - with: - path: ${{ env.REPORTS_PATH }} - - name: Clear repository - run: | - sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" - - name: Check out repository code - uses: actions/checkout@v2 - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (ubsan, ParallelReplicas) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatefulTestMsanParallelReplicas: needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_msan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (msan, ParallelReplicas) - REPO_COPY=${{runner.temp}}/stateful_msan/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v2 - with: - path: ${{ env.REPORTS_PATH }} - - name: Clear repository - run: | - sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" - - name: Check out repository code - uses: actions/checkout@v2 - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - 
docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (msan, ParallelReplicas) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatefulTestTsanParallelReplicas: needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (tsan, ParallelReplicas) - REPO_COPY=${{runner.temp}}/stateful_tsan/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v2 - with: - path: ${{ env.REPORTS_PATH }} - - name: Clear repository - run: | - sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" - - name: Check out repository code - uses: actions/checkout@v2 - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (tsan, ParallelReplicas) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatefulTestAsanParallelReplicas: needs: [BuilderDebAsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> 
"$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (asan, ParallelReplicas) - REPO_COPY=${{runner.temp}}/stateful_debug/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v2 - with: - path: ${{ env.REPORTS_PATH }} - - name: Clear repository - run: | - sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" - - name: Check out repository code - uses: actions/checkout@v2 - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (asan, ParallelReplicas) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatefulTestReleaseParallelReplicas: needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (release, ParallelReplicas) - REPO_COPY=${{runner.temp}}/stateful_release/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v2 - with: - path: ${{ env.REPORTS_PATH }} - - name: Clear repository - run: | - sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" - - name: Check out repository code - uses: actions/checkout@v2 - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p 
"$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (release, ParallelReplicas) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" ############################################################################################## ######################################### STRESS TESTS ####################################### ############################################################################################## @@ -4378,36 +2658,16 @@ jobs: - BuilderReport - BuilderSpecialReport - FastTest - - FunctionalStatelessTestDebug0 - - FunctionalStatelessTestDebug1 - - FunctionalStatelessTestDebug2 - - FunctionalStatelessTestDebug3 - - FunctionalStatelessTestDebug4 + - FunctionalStatelessTestDebug - FunctionalStatelessTestRelease - - FunctionalStatelessTestReleaseDatabaseReplicated0 - - FunctionalStatelessTestReleaseDatabaseReplicated1 - - FunctionalStatelessTestReleaseDatabaseReplicated2 - - FunctionalStatelessTestReleaseDatabaseReplicated3 + - FunctionalStatelessTestReleaseDatabaseReplicated - FunctionalStatelessTestReleaseWideParts - FunctionalStatelessTestReleaseAnalyzer - FunctionalStatelessTestAarch64 - - FunctionalStatelessTestAsan0 - - FunctionalStatelessTestAsan1 - - FunctionalStatelessTestAsan2 - - FunctionalStatelessTestAsan3 - - FunctionalStatelessTestTsan0 - - FunctionalStatelessTestTsan1 - - FunctionalStatelessTestTsan2 - - FunctionalStatelessTestTsan3 - - FunctionalStatelessTestTsan4 - - FunctionalStatelessTestMsan0 - - FunctionalStatelessTestMsan1 - - 
FunctionalStatelessTestMsan2 - - FunctionalStatelessTestMsan3 - - FunctionalStatelessTestMsan4 - - FunctionalStatelessTestMsan5 - - FunctionalStatelessTestUBsan0 - - FunctionalStatelessTestUBsan1 + - FunctionalStatelessTestAsan + - FunctionalStatelessTestTsan + - FunctionalStatelessTestMsan + - FunctionalStatelessTestUBsan - FunctionalStatefulTestDebug - FunctionalStatefulTestRelease - FunctionalStatefulTestAarch64 @@ -4415,17 +2675,15 @@ jobs: - FunctionalStatefulTestTsan - FunctionalStatefulTestMsan - FunctionalStatefulTestUBsan - - FunctionalStatelessTestReleaseS3_0 - - FunctionalStatelessTestReleaseS3_1 - - FunctionalStatelessTestS3Debug0 - - FunctionalStatelessTestS3Debug1 - - FunctionalStatelessTestS3Debug2 - - FunctionalStatelessTestS3Debug4 - - FunctionalStatelessTestS3Debug5 - - FunctionalStatelessTestS3Tsan0 - - FunctionalStatelessTestS3Tsan1 - - FunctionalStatelessTestS3Tsan2 - - FunctionalStatelessTestS3Tsan4 + - FunctionalStatelessTestReleaseS3 + - FunctionalStatelessTestS3Debug + - FunctionalStatelessTestS3Tsan + - FunctionalStatefulTestReleaseParallelReplicas + - FunctionalStatefulTestAsanParallelReplicas + - FunctionalStatefulTestTsanParallelReplicas + - FunctionalStatefulTestMsanParallelReplicas + - FunctionalStatefulTestUBsanParallelReplicas + - FunctionalStatefulTestDebugParallelReplicas - StressTestDebug - StressTestAsan - StressTestTsan diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 5e8530bcaae..81aad9ead13 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -343,743 +343,175 @@ jobs: ############################################################################################## FunctionalStatelessTestRelease: needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - 
CHECK_NAME=Stateless tests (release) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (release) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatelessTestAarch64: needs: [BuilderDebAarch64] - runs-on: [self-hosted, func-tester-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (aarch64) - REPO_COPY=${{runner.temp}}/stateless_release/ClickHouse - KILL_TIMEOUT=10800 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs 
--no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestAsan0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (aarch64) + runner_type: func-tester-aarch64 + additional_envs: | + KILL_TIMEOUT=10800 + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" + FunctionalStatelessTestAsan: needs: [BuilderDebAsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (asan) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=2 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestAsan1: - needs: [BuilderDebAsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (asan) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=2 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: 
ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestTsan0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (asan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 4 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" + FunctionalStatelessTestTsan: needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan) - REPO_COPY=${{runner.temp}}/stateless_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=3 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestTsan1: - needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - 
TEMP_PATH=${{runner.temp}}/stateless_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan) - REPO_COPY=${{runner.temp}}/stateless_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=3 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestTsan2: - needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (tsan) - REPO_COPY=${{runner.temp}}/stateless_tsan/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=3 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests 
(tsan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 5 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatelessTestUBsan: needs: [BuilderDebUBsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_ubsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (ubsan) - REPO_COPY=${{runner.temp}}/stateless_ubsan/ClickHouse - KILL_TIMEOUT=10800 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestMsan0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (ubsan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 2 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" + FunctionalStatelessTestMsan: needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (msan) - REPO_COPY=${{runner.temp}}/stateless_memory/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=3 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - 
with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestMsan1: - needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (msan) - REPO_COPY=${{runner.temp}}/stateless_memory/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=3 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestMsan2: - needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (msan) - REPO_COPY=${{runner.temp}}/stateless_memory/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=2 - 
RUN_BY_HASH_TOTAL=3 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestDebug0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (msan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 6 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" + FunctionalStatelessTestDebug: needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=3 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr 
"$TEMP_PATH" - FunctionalStatelessTestDebug1: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=3 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - FunctionalStatelessTestDebug2: - needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateless_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateless tests (debug) - REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse - KILL_TIMEOUT=10800 - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=3 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker 
kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateless tests (debug) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 5 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" ############################################################################################## ############################ FUNCTIONAl STATEFUL TESTS ####################################### ############################################################################################## FunctionalStatefulTestRelease: needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (release) - REPO_COPY=${{runner.temp}}/stateful_debug/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (release) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatefulTestAarch64: needs: 
[BuilderDebAarch64] - runs-on: [self-hosted, func-tester-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (aarch64) - REPO_COPY=${{runner.temp}}/stateful_release/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (aarch64) + runner_type: func-tester-aarch64 + additional_envs: | + KILL_TIMEOUT=3600 + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatefulTestAsan: needs: [BuilderDebAsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (asan) - REPO_COPY=${{runner.temp}}/stateful_debug/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 
functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (asan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatefulTestTsan: needs: [BuilderDebTsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (tsan) - REPO_COPY=${{runner.temp}}/stateful_tsan/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (tsan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatefulTestMsan: needs: [BuilderDebMsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - 
TEMP_PATH=${{runner.temp}}/stateful_msan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (msan) - REPO_COPY=${{runner.temp}}/stateful_msan/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (msan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatefulTestUBsan: needs: [BuilderDebUBsan] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_ubsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (ubsan) - REPO_COPY=${{runner.temp}}/stateful_ubsan/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker 
kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (ubsan) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" FunctionalStatefulTestDebug: needs: [BuilderDebDebug] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stateful_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stateful tests (debug) - REPO_COPY=${{runner.temp}}/stateful_debug/ClickHouse - KILL_TIMEOUT=3600 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Functional test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stateful tests (debug) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=3600 + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" ############################################################################################## ######################################### STRESS TESTS ####################################### ############################################################################################## @@ -1577,19 +1009,12 @@ jobs: - BuilderReport - 
BuilderSpecialReport - MarkReleaseReady - - FunctionalStatelessTestDebug0 - - FunctionalStatelessTestDebug1 - - FunctionalStatelessTestDebug2 + - FunctionalStatelessTestDebug - FunctionalStatelessTestRelease - FunctionalStatelessTestAarch64 - - FunctionalStatelessTestAsan0 - - FunctionalStatelessTestAsan1 - - FunctionalStatelessTestTsan0 - - FunctionalStatelessTestTsan1 - - FunctionalStatelessTestTsan2 - - FunctionalStatelessTestMsan0 - - FunctionalStatelessTestMsan1 - - FunctionalStatelessTestMsan2 + - FunctionalStatelessTestAsan + - FunctionalStatelessTestTsan + - FunctionalStatelessTestMsan - FunctionalStatelessTestUBsan - FunctionalStatefulTestDebug - FunctionalStatefulTestRelease From 449555777400755bcbbdb7d134322ba1e7088dae Mon Sep 17 00:00:00 2001 From: "Mikhail f. Shiryaev" Date: Fri, 10 Nov 2023 12:38:52 +0100 Subject: [PATCH 62/99] Rewrite stress and upgrade checks --- .github/workflows/backport_branches.yml | 43 +-- .github/workflows/master.yml | 199 +++---------- .github/workflows/pull_request.yml | 366 +++++------------------- .github/workflows/release_branches.yml | 199 +++---------- 4 files changed, 167 insertions(+), 640 deletions(-) diff --git a/.github/workflows/backport_branches.yml b/.github/workflows/backport_branches.yml index 02a5ee00e4c..d7fe66472d0 100644 --- a/.github/workflows/backport_branches.yml +++ b/.github/workflows/backport_branches.yml @@ -373,41 +373,14 @@ jobs: ############################################################################################## StressTestTsan: needs: [BuilderDebTsan] - # func testers have 16 cores + 128 GB memory - # while stress testers have 36 cores + 72 memory - # It would be better to have something like 32 + 128, - # but such servers almost unavailable as spot instances. 
- runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stress_thread - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stress test (tsan) - REPO_COPY=${{runner.temp}}/stress_thread/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Stress test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 stress_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stress test (tsan) + runner_type: stress-tester + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 stress_check.py "$CHECK_NAME" ############################################################################################# ############################# INTEGRATION TESTS ############################################# ############################################################################################# diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml index 77d9ac58b06..d963de094a5 100644 --- a/.github/workflows/master.yml +++ b/.github/workflows/master.yml @@ -674,173 +674,54 @@ jobs: ############################################################################################## StressTestAsan: needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stress_thread - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stress test (asan) - 
REPO_COPY=${{runner.temp}}/stress_thread/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Stress test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 stress_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stress test (asan) + runner_type: stress-tester + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 stress_check.py "$CHECK_NAME" StressTestTsan: needs: [BuilderDebTsan] - # func testers have 16 cores + 128 GB memory - # while stress testers have 36 cores + 72 memory - # It would be better to have something like 32 + 128, - # but such servers almost unavailable as spot instances. 
- runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stress_thread - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stress test (tsan) - REPO_COPY=${{runner.temp}}/stress_thread/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Stress test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 stress_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stress test (tsan) + runner_type: stress-tester + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 stress_check.py "$CHECK_NAME" StressTestMsan: needs: [BuilderDebMsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stress_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stress test (msan) - REPO_COPY=${{runner.temp}}/stress_memory/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Stress test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 stress_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - 
sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stress test (msan) + runner_type: stress-tester + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 stress_check.py "$CHECK_NAME" StressTestUBsan: needs: [BuilderDebUBsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stress_undefined - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stress test (ubsan) - REPO_COPY=${{runner.temp}}/stress_undefined/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Stress test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 stress_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stress test (ubsan) + runner_type: stress-tester + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 stress_check.py "$CHECK_NAME" StressTestDebug: needs: [BuilderDebDebug] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stress_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stress test (debug) - REPO_COPY=${{runner.temp}}/stress_debug/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Stress test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p 
"$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 stress_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stress test (debug) + runner_type: stress-tester + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 stress_check.py "$CHECK_NAME" ############################################################################################# ############################# INTEGRATION TESTS ############################################# ############################################################################################# diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index e8c31c830cc..a048d052178 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -842,309 +842,97 @@ jobs: ############################################################################################## StressTestAsan: needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stress_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stress test (asan) - REPO_COPY=${{runner.temp}}/stress_asan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Stress test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 stress_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | 
xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stress test (asan) + runner_type: stress-tester + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 stress_check.py "$CHECK_NAME" StressTestTsan: needs: [BuilderDebTsan] - # func testers have 16 cores + 128 GB memory - # while stress testers have 36 cores + 72 memory - # It would be better to have something like 32 + 128, - # but such servers almost unavailable as spot instances. - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stress_thread - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stress test (tsan) - REPO_COPY=${{runner.temp}}/stress_thread/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Stress test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 stress_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stress test (tsan) + runner_type: stress-tester + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 stress_check.py "$CHECK_NAME" StressTestMsan: needs: [BuilderDebMsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stress_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stress test (msan) - REPO_COPY=${{runner.temp}}/stress_memory/ClickHouse - EOF - - name: Download json reports - uses: 
actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Stress test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 stress_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stress test (msan) + runner_type: stress-tester + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 stress_check.py "$CHECK_NAME" StressTestUBsan: needs: [BuilderDebUBsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stress_undefined - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stress test (ubsan) - REPO_COPY=${{runner.temp}}/stress_undefined/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Stress test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 stress_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stress test (ubsan) + runner_type: stress-tester + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 stress_check.py "$CHECK_NAME" StressTestDebug: needs: [BuilderDebDebug] - runs-on: [self-hosted, 
stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stress_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stress test (debug) - REPO_COPY=${{runner.temp}}/stress_debug/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Stress test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 stress_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - ############################################################################################## - ######################################### UPGRADE CHECK ###################################### - ############################################################################################## + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stress test (debug) + runner_type: stress-tester + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 stress_check.py "$CHECK_NAME" +############################################################################################## +######################################### UPGRADE CHECK ###################################### +############################################################################################## UpgradeCheckAsan: needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/upgrade_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Upgrade check (asan) - REPO_COPY=${{runner.temp}}/upgrade_asan/ClickHouse - EOF - - name: 
Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Upgrade check - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 upgrade_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Upgrade check (asan) + runner_type: stress-tester + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 upgrade_check.py "$CHECK_NAME" UpgradeCheckTsan: needs: [BuilderDebTsan] - # same as for stress test with tsan - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/upgrade_thread - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Upgrade check (tsan) - REPO_COPY=${{runner.temp}}/upgrade_thread/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Upgrade check - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 upgrade_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Upgrade check (tsan) + runner_type: stress-tester + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 upgrade_check.py "$CHECK_NAME" 
UpgradeCheckMsan: needs: [BuilderDebMsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/upgrade_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Upgrade check (msan) - REPO_COPY=${{runner.temp}}/upgrade_memory/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Upgrade check - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 upgrade_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Upgrade check (msan) + runner_type: stress-tester + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 upgrade_check.py "$CHECK_NAME" UpgradeCheckDebug: needs: [BuilderDebDebug] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/upgrade_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Upgrade check (debug) - REPO_COPY=${{runner.temp}}/upgrade_debug/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Upgrade check - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 upgrade_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - 
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Upgrade check (debug) + runner_type: stress-tester + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 upgrade_check.py "$CHECK_NAME" ############################################################################################## ##################################### AST FUZZERS ############################################ ############################################################################################## @@ -2689,6 +2477,10 @@ jobs: - StressTestTsan - StressTestMsan - StressTestUBsan + - UpgradeCheckAsan + - UpgradeCheckTsan + - UpgradeCheckMsan + - UpgradeCheckDebug - ASTFuzzerTestDebug - ASTFuzzerTestAsan - ASTFuzzerTestTsan diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 81aad9ead13..77e43656fd3 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -517,173 +517,54 @@ jobs: ############################################################################################## StressTestAsan: needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stress_thread - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stress test (asan) - REPO_COPY=${{runner.temp}}/stress_thread/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Stress test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 stress_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - 
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stress test (asan) + runner_type: stress-tester + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 stress_check.py "$CHECK_NAME" StressTestTsan: needs: [BuilderDebTsan] - # func testers have 16 cores + 128 GB memory - # while stress testers have 36 cores + 72 memory - # It would be better to have something like 32 + 128, - # but such servers almost unavailable as spot instances. - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stress_thread - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stress test (tsan) - REPO_COPY=${{runner.temp}}/stress_thread/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Stress test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 stress_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stress test (tsan) + runner_type: stress-tester + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 stress_check.py "$CHECK_NAME" StressTestMsan: needs: [BuilderDebMsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stress_memory - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stress test (msan) - REPO_COPY=${{runner.temp}}/stress_memory/ClickHouse - EOF - - name: 
Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Stress test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 stress_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stress test (msan) + runner_type: stress-tester + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 stress_check.py "$CHECK_NAME" StressTestUBsan: needs: [BuilderDebUBsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stress_undefined - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stress test (ubsan) - REPO_COPY=${{runner.temp}}/stress_undefined/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Stress test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 stress_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stress test (ubsan) + runner_type: stress-tester + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 stress_check.py "$CHECK_NAME" StressTestDebug: needs: [BuilderDebDebug] - 
runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/stress_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Stress test (debug) - REPO_COPY=${{runner.temp}}/stress_debug/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Stress test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 stress_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Stress test (debug) + runner_type: stress-tester + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 stress_check.py "$CHECK_NAME" ############################################################################################# ############################# INTEGRATION TESTS ############################################# ############################################################################################# From e07de85497b1ea702fc010a50e9ed064277f13db Mon Sep 17 00:00:00 2001 From: "Mikhail f. 
Shiryaev" Date: Fri, 10 Nov 2023 12:50:03 +0100 Subject: [PATCH 63/99] Rewrite AST fuzzers and performance checks --- .github/workflows/master.yml | 497 ++++------------------------ .github/workflows/pull_request.yml | 501 ++++------------------------- 2 files changed, 120 insertions(+), 878 deletions(-) diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml index d963de094a5..aa45970a965 100644 --- a/.github/workflows/master.yml +++ b/.github/workflows/master.yml @@ -1500,169 +1500,54 @@ jobs: ############################################################################################## ASTFuzzerTestAsan: needs: [BuilderDebAsan] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/ast_fuzzer_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=AST fuzzer (asan) - REPO_COPY=${{runner.temp}}/ast_fuzzer_asan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Fuzzer - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 ast_fuzzer_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: AST fuzzer (asan) + runner_type: fuzzer-unit-tester + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 ast_fuzzer_check.py "$CHECK_NAME" ASTFuzzerTestTsan: needs: [BuilderDebTsan] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/ast_fuzzer_tsan - 
REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=AST fuzzer (tsan) - REPO_COPY=${{runner.temp}}/ast_fuzzer_tsan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Fuzzer - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 ast_fuzzer_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: AST fuzzer (tsan) + runner_type: fuzzer-unit-tester + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 ast_fuzzer_check.py "$CHECK_NAME" ASTFuzzerTestUBSan: needs: [BuilderDebUBsan] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/ast_fuzzer_ubsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=AST fuzzer (ubsan) - REPO_COPY=${{runner.temp}}/ast_fuzzer_ubsan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Fuzzer - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 ast_fuzzer_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: AST fuzzer (ubsan) + runner_type: 
fuzzer-unit-tester + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 ast_fuzzer_check.py "$CHECK_NAME" ASTFuzzerTestMSan: needs: [BuilderDebMsan] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/ast_fuzzer_msan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=AST fuzzer (msan) - REPO_COPY=${{runner.temp}}/ast_fuzzer_msan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Fuzzer - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 ast_fuzzer_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: AST fuzzer (msan) + runner_type: fuzzer-unit-tester + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 ast_fuzzer_check.py "$CHECK_NAME" ASTFuzzerTestDebug: needs: [BuilderDebDebug] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/ast_fuzzer_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=AST fuzzer (debug) - REPO_COPY=${{runner.temp}}/ast_fuzzer_debug/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Fuzzer - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 
ast_fuzzer_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: AST fuzzer (debug) + runner_type: fuzzer-unit-tester + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 ast_fuzzer_check.py "$CHECK_NAME" ############################################################################################# #################################### UNIT TESTS ############################################# ############################################################################################# @@ -1834,286 +1719,26 @@ jobs: ############################################################################################# #################################### PERFORMANCE TESTS ###################################### ############################################################################################# - PerformanceComparisonX86-0: + PerformanceComparisonX86: needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/performance_comparison - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Performance Comparison - REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Performance Comparison - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 performance_comparison_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs 
--no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - PerformanceComparisonX86-1: - needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/performance_comparison - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Performance Comparison - REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Performance Comparison - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 performance_comparison_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - PerformanceComparisonX86-2: - needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/performance_comparison - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Performance Comparison - REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Performance Comparison - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 performance_comparison_check.py 
"$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - PerformanceComparisonX86-3: - needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/performance_comparison - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Performance Comparison - REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Performance Comparison - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 performance_comparison_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - PerformanceComparisonAarch-0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Performance Comparison + runner_type: stress-tester + batches: 4 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 performance_comparison_check.py "$CHECK_NAME" + PerformanceComparisonAarch: needs: [BuilderDebAarch64] - runs-on: [self-hosted, func-tester-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/performance_comparison - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Performance Comparison Aarch64 - REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: 
actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Performance Comparison - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 performance_comparison_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - PerformanceComparisonAarch-1: - needs: [BuilderDebAarch64] - runs-on: [self-hosted, func-tester-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/performance_comparison - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Performance Comparison Aarch64 - REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Performance Comparison - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 performance_comparison_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - PerformanceComparisonAarch-2: - needs: [BuilderDebAarch64] - runs-on: [self-hosted, func-tester-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/performance_comparison - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Performance Comparison Aarch64 - 
REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Performance Comparison - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 performance_comparison_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - PerformanceComparisonAarch-3: - needs: [BuilderDebAarch64] - runs-on: [self-hosted, func-tester-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/performance_comparison - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Performance Comparison Aarch64 - REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Performance Comparison - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 performance_comparison_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Performance Comparison Aarch64 + runner_type: func-tester-aarch64 + batches: 4 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 
performance_comparison_check.py "$CHECK_NAME" ############################################################################################## ###################################### SQLANCER FUZZERS ###################################### ############################################################################################## @@ -2234,10 +1859,8 @@ jobs: - IntegrationTestsTsan3 - IntegrationTestsTsan4 - IntegrationTestsTsan5 - - PerformanceComparisonX86-0 - - PerformanceComparisonX86-1 - - PerformanceComparisonX86-2 - - PerformanceComparisonX86-3 + - PerformanceComparisonX86 + - PerformanceComparisonAarch - CompatibilityCheckX86 - CompatibilityCheckAarch64 - ASTFuzzerTestDebug diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index a048d052178..563131ceec3 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -938,169 +938,54 @@ jobs: ############################################################################################## ASTFuzzerTestAsan: needs: [BuilderDebAsan] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/ast_fuzzer_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=AST fuzzer (asan) - REPO_COPY=${{runner.temp}}/ast_fuzzer_asan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Fuzzer - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 ast_fuzzer_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: 
./.github/workflows/reusable_test.yml + with: + test_name: AST fuzzer (asan) + runner_type: fuzzer-unit-tester + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 ast_fuzzer_check.py "$CHECK_NAME" ASTFuzzerTestTsan: needs: [BuilderDebTsan] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/ast_fuzzer_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=AST fuzzer (tsan) - REPO_COPY=${{runner.temp}}/ast_fuzzer_tsan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Fuzzer - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 ast_fuzzer_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: AST fuzzer (tsan) + runner_type: fuzzer-unit-tester + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 ast_fuzzer_check.py "$CHECK_NAME" ASTFuzzerTestUBSan: needs: [BuilderDebUBsan] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/ast_fuzzer_ubsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=AST fuzzer (ubsan) - REPO_COPY=${{runner.temp}}/ast_fuzzer_ubsan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Fuzzer - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" 
- cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 ast_fuzzer_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: AST fuzzer (ubsan) + runner_type: fuzzer-unit-tester + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 ast_fuzzer_check.py "$CHECK_NAME" ASTFuzzerTestMSan: needs: [BuilderDebMsan] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/ast_fuzzer_msan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=AST fuzzer (msan) - REPO_COPY=${{runner.temp}}/ast_fuzzer_msan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Fuzzer - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 ast_fuzzer_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: AST fuzzer (msan) + runner_type: fuzzer-unit-tester + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 ast_fuzzer_check.py "$CHECK_NAME" ASTFuzzerTestDebug: needs: [BuilderDebDebug] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/ast_fuzzer_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=AST fuzzer (debug) - 
REPO_COPY=${{runner.temp}}/ast_fuzzer_debug/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Fuzzer - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 ast_fuzzer_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: AST fuzzer (debug) + runner_type: fuzzer-unit-tester + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 ast_fuzzer_check.py "$CHECK_NAME" ############################################################################################# ############################# INTEGRATION TESTS ############################################# ############################################################################################# @@ -2078,286 +1963,26 @@ jobs: ############################################################################################# #################################### PERFORMANCE TESTS ###################################### ############################################################################################# - PerformanceComparisonX86-0: + PerformanceComparisonX86: needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/performance_comparison - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Performance Comparison - REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: 
${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Performance Comparison - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 performance_comparison_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - PerformanceComparisonX86-1: - needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/performance_comparison - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Performance Comparison - REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Performance Comparison - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 performance_comparison_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - PerformanceComparisonX86-2: - needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/performance_comparison - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Performance Comparison - REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=4 - EOF - - name: 
Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Performance Comparison - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 performance_comparison_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - PerformanceComparisonX86-3: - needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/performance_comparison - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Performance Comparison - REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Performance Comparison - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 performance_comparison_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - PerformanceComparisonAarch-0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Performance Comparison + runner_type: stress-tester + batches: 4 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 performance_comparison_check.py "$CHECK_NAME" + PerformanceComparisonAarch: needs: [BuilderDebAarch64] - runs-on: [self-hosted, 
func-tester-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/performance_comparison - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Performance Comparison Aarch64 - REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Performance Comparison - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 performance_comparison_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - PerformanceComparisonAarch-1: - needs: [BuilderDebAarch64] - runs-on: [self-hosted, func-tester-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/performance_comparison - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Performance Comparison Aarch64 - REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Performance Comparison - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 performance_comparison_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo 
rm -fr "$TEMP_PATH" - PerformanceComparisonAarch-2: - needs: [BuilderDebAarch64] - runs-on: [self-hosted, func-tester-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/performance_comparison - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Performance Comparison Aarch64 - REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Performance Comparison - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 performance_comparison_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - PerformanceComparisonAarch-3: - needs: [BuilderDebAarch64] - runs-on: [self-hosted, func-tester-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/performance_comparison - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Performance Comparison Aarch64 - REPO_COPY=${{runner.temp}}/performance_comparison/ClickHouse - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Performance Comparison - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 performance_comparison_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs 
--no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Performance Comparison Aarch64 + runner_type: func-tester-aarch64 + batches: 4 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 performance_comparison_check.py "$CHECK_NAME" ############################################################################################## ###################################### SQLANCER FUZZERS ###################################### ############################################################################################## @@ -2508,14 +2133,8 @@ jobs: - IntegrationTestsTsan3 - IntegrationTestsTsan4 - IntegrationTestsTsan5 - - PerformanceComparisonX86-0 - - PerformanceComparisonX86-1 - - PerformanceComparisonX86-2 - - PerformanceComparisonX86-3 - - PerformanceComparisonAarch-0 - - PerformanceComparisonAarch-1 - - PerformanceComparisonAarch-2 - - PerformanceComparisonAarch-3 + - PerformanceComparisonX86 + - PerformanceComparisonAarch - UnitTestsAsan - UnitTestsTsan - UnitTestsMsan From db8a548718be4ba750783b302797faca473fb80d Mon Sep 17 00:00:00 2001 From: "Mikhail f. 
Shiryaev" Date: Fri, 10 Nov 2023 13:25:38 +0100 Subject: [PATCH 64/99] Rewrite integrations checks --- .github/workflows/backport_branches.yml | 39 +- .github/workflows/master.yml | 828 ++-------------------- .github/workflows/pull_request.yml | 869 ++---------------------- .github/workflows/release_branches.yml | 359 ++-------- 4 files changed, 136 insertions(+), 1959 deletions(-) diff --git a/.github/workflows/backport_branches.yml b/.github/workflows/backport_branches.yml index d7fe66472d0..ddf9ae1d384 100644 --- a/.github/workflows/backport_branches.yml +++ b/.github/workflows/backport_branches.yml @@ -386,37 +386,14 @@ jobs: ############################################################################################# IntegrationTestsRelease: needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (release) - REPO_COPY=${{runner.temp}}/integration_tests_release/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Integration tests (release) + runner_type: stress-tester + batches: 4 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 integration_test_check.py "$CHECK_NAME" FinishCheck: needs: - DockerHubPush diff --git 
a/.github/workflows/master.yml b/.github/workflows/master.yml index aa45970a965..3e68ab83e98 100644 --- a/.github/workflows/master.yml +++ b/.github/workflows/master.yml @@ -725,776 +725,46 @@ jobs: ############################################################################################# ############################# INTEGRATION TESTS ############################################# ############################################################################################# - IntegrationTestsAsan0: + IntegrationTestsAsan: needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAsan1: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Integration tests (asan) + runner_type: stress-tester + batches: 4 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 integration_test_check.py "$CHECK_NAME" + IntegrationTestsAnalyzerAsan: needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - 
REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAsan2: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAsan3: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - 
TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAsan4: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=4 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAsan5: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - 
run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=5 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAnalyzerAsan0: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan, analyzer) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAnalyzerAsan1: - needs: [BuilderDebAsan] - 
runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan, analyzer) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAnalyzerAsan2: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan, analyzer) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr 
"$TEMP_PATH" - IntegrationTestsAnalyzerAsan3: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan, analyzer) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAnalyzerAsan4: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan, analyzer) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=4 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker 
ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAnalyzerAsan5: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan, analyzer) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=5 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsTsan0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Integration tests (asan, analyzer) + runner_type: stress-tester + batches: 6 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 integration_test_check.py "$CHECK_NAME" + IntegrationTestsTsan: needs: [BuilderDebTsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (tsan) - REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - 
- name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsTsan1: - needs: [BuilderDebTsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (tsan) - REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsTsan2: - needs: [BuilderDebTsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (tsan) - REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: 
ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsTsan3: - needs: [BuilderDebTsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (tsan) - REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsTsan4: - needs: [BuilderDebTsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (tsan) - REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse - RUN_BY_HASH_NUM=4 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - 
name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsTsan5: - needs: [BuilderDebTsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (tsan) - REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse - RUN_BY_HASH_NUM=5 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsRelease0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Integration tests (tsan) + runner_type: stress-tester + batches: 6 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 integration_test_check.py "$CHECK_NAME" + IntegrationTestsRelease: needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_release - 
REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (release) - REPO_COPY=${{runner.temp}}/integration_tests_release/ClickHouse - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsRelease1: - needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (release) - REPO_COPY=${{runner.temp}}/integration_tests_release/ClickHouse - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsRelease2: - needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> 
"$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (release) - REPO_COPY=${{runner.temp}}/integration_tests_release/ClickHouse - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsRelease3: - needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (release) - REPO_COPY=${{runner.temp}}/integration_tests_release/ClickHouse - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: 
Integration tests (release) + runner_type: stress-tester + batches: 4 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 integration_test_check.py "$CHECK_NAME" ############################################################################################## ##################################### AST FUZZERS ############################################ ############################################################################################## @@ -1837,28 +1107,10 @@ jobs: - StressTestTsan - StressTestMsan - StressTestUBsan - - IntegrationTestsAsan0 - - IntegrationTestsAsan1 - - IntegrationTestsAsan2 - - IntegrationTestsAsan3 - - IntegrationTestsAsan4 - - IntegrationTestsAsan5 - - IntegrationTestsAnalyzerAsan0 - - IntegrationTestsAnalyzerAsan1 - - IntegrationTestsAnalyzerAsan2 - - IntegrationTestsAnalyzerAsan3 - - IntegrationTestsAnalyzerAsan4 - - IntegrationTestsAnalyzerAsan5 - - IntegrationTestsRelease0 - - IntegrationTestsRelease1 - - IntegrationTestsRelease2 - - IntegrationTestsRelease3 - - IntegrationTestsTsan0 - - IntegrationTestsTsan1 - - IntegrationTestsTsan2 - - IntegrationTestsTsan3 - - IntegrationTestsTsan4 - - IntegrationTestsTsan5 + - IntegrationTestsAsan + - IntegrationTestsAnalyzerAsan + - IntegrationTestsTsan + - IntegrationTestsRelease - PerformanceComparisonX86 - PerformanceComparisonAarch - CompatibilityCheckX86 diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index 563131ceec3..a639bf393b8 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -989,809 +989,56 @@ jobs: ############################################################################################# ############################# INTEGRATION TESTS ############################################# ############################################################################################# - IntegrationTestsAsan0: + IntegrationTestsAsan: needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - 
name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAsan1: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Integration tests (asan) + runner_type: stress-tester + batches: 4 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 integration_test_check.py "$CHECK_NAME" + IntegrationTestsAnalyzerAsan: needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - 
name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAsan2: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAsan3: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 
integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAsan4: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=4 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAsan5: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=5 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" 
"$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAnalyzerAsan0: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan, analyzer) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAnalyzerAsan1: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan, analyzer) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - 
sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAnalyzerAsan2: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan, analyzer) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAnalyzerAsan3: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan, analyzer) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: 
ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAnalyzerAsan4: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan, analyzer) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=4 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAnalyzerAsan5: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan, analyzer) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=5 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - 
path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsTsan0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Integration tests (asan, analyzer) + runner_type: stress-tester + batches: 6 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 integration_test_check.py "$CHECK_NAME" + IntegrationTestsTsan: needs: [BuilderDebTsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (tsan) - REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsTsan1: - needs: [BuilderDebTsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - 
TEMP_PATH=${{runner.temp}}/integration_tests_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (tsan) - REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsTsan2: - needs: [BuilderDebTsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (tsan) - REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsTsan3: - needs: [BuilderDebTsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - 
run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (tsan) - REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsTsan4: - needs: [BuilderDebTsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (tsan) - REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse - RUN_BY_HASH_NUM=4 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsTsan5: - needs: [BuilderDebTsan] - runs-on: [self-hosted, 
stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (tsan) - REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse - RUN_BY_HASH_NUM=5 - RUN_BY_HASH_TOTAL=6 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsRelease0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Integration tests (tsan) + runner_type: stress-tester + batches: 6 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 integration_test_check.py "$CHECK_NAME" + IntegrationTestsRelease: needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (release) - REPO_COPY=${{runner.temp}}/integration_tests_release/ClickHouse - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 
integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsRelease1: - needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (release) - REPO_COPY=${{runner.temp}}/integration_tests_release/ClickHouse - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsRelease2: - needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (release) - REPO_COPY=${{runner.temp}}/integration_tests_release/ClickHouse - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r 
"$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsRelease3: - needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (release) - REPO_COPY=${{runner.temp}}/integration_tests_release/ClickHouse - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Integration tests (release) + runner_type: stress-tester + batches: 4 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 integration_test_check.py "$CHECK_NAME" IntegrationTestsFlakyCheck: needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan_flaky_check - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests flaky check (asan) - REPO_COPY=${{runner.temp}}/integration_tests_asan_flaky_check/ClickHouse - EOF - - name: Download json 
reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Integration tests flaky check (asan) + runner_type: stress-tester + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 integration_test_check.py "$CHECK_NAME" ############################################################################################# #################################### UNIT TESTS ############################################# ############################################################################################# @@ -2111,28 +1358,11 @@ jobs: - ASTFuzzerTestTsan - ASTFuzzerTestMSan - ASTFuzzerTestUBSan - - IntegrationTestsAsan0 - - IntegrationTestsAsan1 - - IntegrationTestsAsan2 - - IntegrationTestsAsan3 - - IntegrationTestsAsan4 - - IntegrationTestsAsan5 - - IntegrationTestsAnalyzerAsan0 - - IntegrationTestsAnalyzerAsan1 - - IntegrationTestsAnalyzerAsan2 - - IntegrationTestsAnalyzerAsan3 - - IntegrationTestsAnalyzerAsan4 - - IntegrationTestsAnalyzerAsan5 - - IntegrationTestsRelease0 - - IntegrationTestsRelease1 - - IntegrationTestsRelease2 - - IntegrationTestsRelease3 - - IntegrationTestsTsan0 - - IntegrationTestsTsan1 - - IntegrationTestsTsan2 - - IntegrationTestsTsan3 - - IntegrationTestsTsan4 - - IntegrationTestsTsan5 + - IntegrationTestsAsan + - IntegrationTestsAnalyzerAsan + - IntegrationTestsTsan + - IntegrationTestsRelease + - IntegrationTestsFlakyCheck - 
PerformanceComparisonX86 - PerformanceComparisonAarch - UnitTestsAsan @@ -2142,7 +1372,6 @@ jobs: - UnitTestsReleaseClang - CompatibilityCheckX86 - CompatibilityCheckAarch64 - - IntegrationTestsFlakyCheck - SQLancerTestRelease - SQLancerTestDebug runs-on: [self-hosted, style-checker] diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index 77e43656fd3..a344f23b2c0 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -568,321 +568,46 @@ jobs: ############################################################################################# ############################# INTEGRATION TESTS ############################################# ############################################################################################# - IntegrationTestsAsan0: + IntegrationTestsAsan: needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=3 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAsan1: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Integration tests (asan) + runner_type: 
stress-tester + batches: 4 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 integration_test_check.py "$CHECK_NAME" + IntegrationTestsAnalyzerAsan: needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=3 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsAsan2: - needs: [BuilderDebAsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (asan) - REPO_COPY=${{runner.temp}}/integration_tests_asan/ClickHouse - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=3 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - 
run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsTsan0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Integration tests (asan, analyzer) + runner_type: stress-tester + batches: 6 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 integration_test_check.py "$CHECK_NAME" + IntegrationTestsTsan: needs: [BuilderDebTsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (tsan) - REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsTsan1: - needs: [BuilderDebTsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (tsan) - REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - 
uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsTsan2: - needs: [BuilderDebTsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (tsan) - REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse - RUN_BY_HASH_NUM=2 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsTsan3: - needs: [BuilderDebTsan] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (tsan) - REPO_COPY=${{runner.temp}}/integration_tests_tsan/ClickHouse - RUN_BY_HASH_NUM=3 - RUN_BY_HASH_TOTAL=4 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} 
- - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsRelease0: + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Integration tests (tsan) + runner_type: stress-tester + batches: 6 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 integration_test_check.py "$CHECK_NAME" + IntegrationTestsRelease: needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/integration_tests_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (release) - REPO_COPY=${{runner.temp}}/integration_tests_release/ClickHouse - RUN_BY_HASH_NUM=0 - RUN_BY_HASH_TOTAL=2 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" - IntegrationTestsRelease1: - needs: [BuilderDebRelease] - runs-on: [self-hosted, stress-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - 
TEMP_PATH=${{runner.temp}}/integration_tests_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Integration tests (release) - REPO_COPY=${{runner.temp}}/integration_tests_release/ClickHouse - RUN_BY_HASH_NUM=1 - RUN_BY_HASH_TOTAL=2 - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Integration test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 integration_test_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Integration tests (release) + runner_type: stress-tester + batches: 4 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 integration_test_check.py "$CHECK_NAME" FinishCheck: needs: - DockerHubPush @@ -909,15 +634,9 @@ jobs: - StressTestTsan - StressTestMsan - StressTestUBsan - - IntegrationTestsAsan0 - - IntegrationTestsAsan1 - - IntegrationTestsAsan2 - - IntegrationTestsRelease0 - - IntegrationTestsRelease1 - - IntegrationTestsTsan0 - - IntegrationTestsTsan1 - - IntegrationTestsTsan2 - - IntegrationTestsTsan3 + - IntegrationTestsAsan + - IntegrationTestsTsan + - IntegrationTestsRelease - CompatibilityCheckX86 - CompatibilityCheckAarch64 runs-on: [self-hosted, style-checker] From 38b251946e5b8bd819369842442277ae6e6ff67b Mon Sep 17 00:00:00 2001 From: "Mikhail f. 
Shiryaev" Date: Fri, 10 Nov 2023 14:29:24 +0100 Subject: [PATCH 65/99] Add another check to CiConfig.validate, and test for it --- tests/ci/ci_config.py | 18 ++++++++++++------ tests/ci/test_ci_config.py | 15 +++++++++++++++ 2 files changed, 27 insertions(+), 6 deletions(-) create mode 100644 tests/ci/test_ci_config.py diff --git a/tests/ci/ci_config.py b/tests/ci/ci_config.py index dc22babb907..6ba3e0992d4 100644 --- a/tests/ci/ci_config.py +++ b/tests/ci/ci_config.py @@ -50,21 +50,27 @@ class CiConfig: def validate(self) -> None: errors = [] - # All build configs must belong to build_report_config - for build_name in self.build_config.keys(): + for name, build_config in self.build_config.items(): build_in_reports = False for report_config in self.builds_report_config.values(): - if build_name in report_config: + if name in report_config: build_in_reports = True break + # All build configs must belong to build_report_config if not build_in_reports: + logging.error("Build name %s does not belong to build reports", name) + errors.append(f"Build name {name} does not belong to build reports") + # The name should be the same as build_config.name + if not build_config.name == name: logging.error( - "Build name %s does not belong to build reports", build_name + "Build name '%s' does not match the config 'name' value '%s'", + name, + build_config.name, ) errors.append( - f"Build name {build_name} does not belong to build reports" + f"Build name {name} does not match 'name' value '{build_config.name}'" ) - # And otherwise + # All build_report_config values should be in build_config.keys() for build_report_name, build_names in self.builds_report_config.items(): missed_names = [ name for name in build_names if name not in self.build_config.keys() diff --git a/tests/ci/test_ci_config.py b/tests/ci/test_ci_config.py new file mode 100644 index 00000000000..d22ed16748e --- /dev/null +++ b/tests/ci/test_ci_config.py @@ -0,0 +1,15 @@ +#!/usr/bin/env python3 + +import unittest + + 
+class TestCiConfig(unittest.TestCase): + def test_no_errors_in_ci_config(self): + raised = None + try: + from ci_config import ( # pylint: disable=import-outside-toplevel + CI_CONFIG as _, + ) + except Exception as exc: + raised = exc + self.assertIsNone(raised, f"CI_CONFIG import raised error {raised}") From a831a648cdde2db874873714929b75fe9f9f4865 Mon Sep 17 00:00:00 2001 From: "Mikhail f. Shiryaev" Date: Fri, 10 Nov 2023 17:47:40 +0100 Subject: [PATCH 66/99] Move GITHUB_JOB_OVERRIDDEN to job ENV context --- .github/workflows/reusable_build.yml | 3 ++- .github/workflows/reusable_test.yml | 6 ++++-- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/.github/workflows/reusable_build.yml b/.github/workflows/reusable_build.yml index f6586016874..7b88c2b9925 100644 --- a/.github/workflows/reusable_build.yml +++ b/.github/workflows/reusable_build.yml @@ -25,6 +25,8 @@ name: Build ClickHouse jobs: Build: name: Build-${{inputs.build_name}} + env: + GITHUB_JOB_OVERRIDDEN: Build-${{inputs.build_name}} runs-on: [self-hosted, '${{inputs.runner_type}}'] steps: - name: Check out repository code @@ -37,7 +39,6 @@ jobs: - name: Set build envs run: | cat >> "$GITHUB_ENV" << 'EOF' - GITHUB_JOB_OVERRIDDEN=Build-${{inputs.build_name}} ${{inputs.additional_envs}} EOF python3 "$GITHUB_WORKSPACE"/tests/ci/ci_config.py --build-name "${{inputs.build_name}}" >> "$GITHUB_ENV" diff --git a/.github/workflows/reusable_test.yml b/.github/workflows/reusable_test.yml index 19939d114f2..4e65733542a 100644 --- a/.github/workflows/reusable_test.yml +++ b/.github/workflows/reusable_test.yml @@ -51,7 +51,10 @@ jobs: batches_output=$(python3 -c 'import json; print(json.dumps(list(range(${{inputs.batches}}))))') echo "batches=${batches_output}" >> "$GITHUB_OUTPUT" Test: - name: ${{inputs.test_name}}-${{matrix.batch}} + # Do not add `-0` to the end, if there's only one batch + name: ${{inputs.test_name}}${{ inputs.batches > 1 && format('-{0}',matrix.batch) || '' }} + env: + 
GITHUB_JOB_OVERRIDDEN: ${{inputs.test_name}}${{ inputs.batches > 1 && format('-{0}',matrix.batch) || '' }} runs-on: [self-hosted, '${{inputs.runner_type}}'] needs: [PrepareStrategy] strategy: @@ -69,7 +72,6 @@ jobs: - name: Set build envs run: | cat >> "$GITHUB_ENV" << 'EOF' - GITHUB_JOB_OVERRIDDEN=${{inputs.test_name}}-${{matrix.batch}} ${{inputs.additional_envs}} EOF - name: Common setup From 88e04579fdc4b79707e0a974e7d77dc50b9bbd2d Mon Sep 17 00:00:00 2001 From: "Mikhail f. Shiryaev" Date: Fri, 10 Nov 2023 19:45:57 +0100 Subject: [PATCH 67/99] Add a simple validator for reusable workflows --- utils/check-style/check-workflows | 8 ++- utils/check-style/check_reusable_workflows.py | 53 +++++++++++++++++++ 2 files changed, 60 insertions(+), 1 deletion(-) create mode 100644 utils/check-style/check_reusable_workflows.py diff --git a/utils/check-style/check-workflows b/utils/check-style/check-workflows index df2292d84ca..fb41d5af461 100755 --- a/utils/check-style/check-workflows +++ b/utils/check-style/check-workflows @@ -2,8 +2,14 @@ set -e +WORKING_DIR=$(dirname "$0") +cd "$WORKING_DIR" + GIT_ROOT=$(git rev-parse --show-cdup) -GIT_ROOT=${GIT_ROOT:-.} +GIT_ROOT=${GIT_ROOT:-../../} act --list --directory="$GIT_ROOT" 1>/dev/null 2>&1 || act --list --directory="$GIT_ROOT" 2>&1 actionlint -ignore 'reusable workflow call.+' || : + + +python3 check_reusable_workflows.py diff --git a/utils/check-style/check_reusable_workflows.py b/utils/check-style/check_reusable_workflows.py new file mode 100644 index 00000000000..6fe22786650 --- /dev/null +++ b/utils/check-style/check_reusable_workflows.py @@ -0,0 +1,53 @@ +#!/usr/bin/env python3 + +from pathlib import Path +from typing import Dict, Iterable, List +import yaml + +git_root = Path(__file__).absolute().parents[2] + + +def check_workflows(paths: Iterable[Path]) -> List[str]: + outputs = [] # type: List[str] + for path in paths: + workflow_object = yaml.safe_load(path.read_bytes()) + workflow_object["file---name"] = path.name 
+ outputs.extend(check_name_override(workflow_object)) + + return outputs + + +def check_name_override(workflow_object: dict) -> List[str]: + outputs = [] # type: List[str] + workflow_file = workflow_object.get("file---name", "") # type: str + jobs = workflow_object.get("jobs", {}) # type: Dict[str, dict] + for name, obj in jobs.items(): + header = f"Workflow '{workflow_file}': Job '{name}': " + name_overriden = obj.get("name", "") + env_name_overriden = obj.get("env", {}).get("GITHUB_JOB_OVERRIDDEN", "") + if name_overriden or env_name_overriden: + if not (name_overriden and env_name_overriden): + outputs.append( + f"{header}job has one of 'name' and 'env.GITHUB_JOB_OVERRIDDEN', " + "but not both" + ) + elif name_overriden != env_name_overriden: + outputs.append( + f"{header}value of 'name' and 'env.GITHUB_JOB_OVERRIDDEN' are not " + f"equal. name={name_overriden}; " + f"env.GITHUB_JOB_OVERRIDDEN={env_name_overriden}" + ) + return outputs + + +def main() -> None: + reusable_workflow_paths = git_root.glob(".github/workflows/reusable_*.y*ml") + outputs = check_workflows(reusable_workflow_paths) + if outputs: + print("Found next issues for workflows:") + for o in outputs: + print(o) + + +if __name__ == "__main__": + main() From 6f8baf3def2e8836db88c33f51f5fc2835c0cfde Mon Sep 17 00:00:00 2001 From: "Mikhail f. 
Shiryaev" Date: Fri, 10 Nov 2023 21:16:54 +0100 Subject: [PATCH 68/99] Rewrite style checks to reusable workflow --- .github/workflows/docs_check.yml | 45 +++++++++-------------------- .github/workflows/master.yml | 36 +++++------------------ .github/workflows/pull_request.yml | 42 ++++++++------------------- .github/workflows/reusable_test.yml | 5 ++++ tests/ci/env_helper.py | 2 +- tests/ci/style_check.py | 8 +++-- 6 files changed, 45 insertions(+), 93 deletions(-) diff --git a/.github/workflows/docs_check.yml b/.github/workflows/docs_check.yml index dada9999a68..c311679c8a3 100644 --- a/.github/workflows/docs_check.yml +++ b/.github/workflows/docs_check.yml @@ -96,38 +96,21 @@ jobs: path: ${{ runner.temp }}/changed_images.json StyleCheck: needs: DockerHubPush - runs-on: [self-hosted, style-checker] - if: ${{ success() || failure() }} - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{ runner.temp }}/style_check - ROBOT_CLICKHOUSE_SSH_KEY<> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{ runner.temp }}/style_check - EOF - - name: Download changed images - # even if artifact does not exist, e.g. 
on `do not test` label or failed Docker job - continue-on-error: true - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.TEMP_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Style Check - run: | - cd "$GITHUB_WORKSPACE/tests/ci" - python3 style_check.py --no-push - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Style check + runner_type: style-checker + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 style_check.py CompatibilityCheckX86: needs: [BuilderDebRelease] runs-on: [self-hosted, style-checker] diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index a639bf393b8..8b7f7d94eed 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -118,39 +118,21 @@ jobs: path: ${{ runner.temp }}/changed_images.json StyleCheck: needs: DockerHubPush - runs-on: [self-hosted, style-checker] # We need additional `&& ! cancelled()` to have the job being able to cancel if: ${{ success() || failure() || ( always() && ! 
cancelled() ) }} - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{ runner.temp }}/style_check - ROBOT_CLICKHOUSE_SSH_KEY<> "$GITHUB_ENV" << 'EOF' ${{inputs.additional_envs}} + ${{secrets.secret_envs}} EOF - name: Common setup uses: ./.github/actions/common_setup diff --git a/tests/ci/env_helper.py b/tests/ci/env_helper.py index 2d867e62228..6364ea0ff7c 100644 --- a/tests/ci/env_helper.py +++ b/tests/ci/env_helper.py @@ -24,7 +24,7 @@ GITHUB_WORKSPACE = os.getenv("GITHUB_WORKSPACE", git_root) GITHUB_RUN_URL = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}/actions/runs/{GITHUB_RUN_ID}" IMAGES_PATH = os.getenv("IMAGES_PATH", TEMP_PATH) REPORTS_PATH = os.getenv("REPORTS_PATH", p.abspath(p.join(module_dir, "./reports"))) -REPO_COPY = os.getenv("REPO_COPY", git_root) +REPO_COPY = os.getenv("REPO_COPY", GITHUB_WORKSPACE) RUNNER_TEMP = os.getenv("RUNNER_TEMP", p.abspath(p.join(module_dir, "./tmp"))) S3_BUILDS_BUCKET = os.getenv("S3_BUILDS_BUCKET", "clickhouse-builds") S3_TEST_REPORTS_BUCKET = os.getenv("S3_TEST_REPORTS_BUCKET", "clickhouse-test-reports") diff --git a/tests/ci/style_check.py b/tests/ci/style_check.py index 83dc54a57b8..a006e01ff6b 100644 --- a/tests/ci/style_check.py +++ b/tests/ci/style_check.py @@ -21,7 +21,7 @@ from commit_status_helper import ( update_mergeable_check, ) from docker_pull_helper import get_image_with_version -from env_helper import GITHUB_WORKSPACE, TEMP_PATH +from env_helper import REPO_COPY, REPORTS_PATH, TEMP_PATH from get_robot_token import get_best_robot_token from github_helper import GitHub from git_helper import git_runner @@ -139,9 +139,11 @@ def main(): stopwatch = Stopwatch() - repo_path = Path(GITHUB_WORKSPACE) + repo_path = Path(REPO_COPY) temp_path = Path(TEMP_PATH) temp_path.mkdir(parents=True, exist_ok=True) + reports_path = Path(REPORTS_PATH) + reports_path.mkdir(parents=True, exist_ok=True) pr_info = PRInfo() if args.push: @@ -161,7 +163,7 @@ def main(): code = int(state != "success") 
sys.exit(code) - docker_image = get_image_with_version(temp_path, "clickhouse/style-test") + docker_image = get_image_with_version(reports_path, "clickhouse/style-test") s3_helper = S3Helper() cmd = ( From b6a71ae6f4bcf001e09947ece06f485b4d39a0eb Mon Sep 17 00:00:00 2001 From: "Mikhail f. Shiryaev" Date: Sat, 11 Nov 2023 00:16:22 +0100 Subject: [PATCH 69/99] Rewrite fast tests to reusable workflow --- .github/workflows/pull_request.yml | 36 +++++++----------------------- tests/ci/fast_test_check.py | 7 +++--- 2 files changed, 12 insertions(+), 31 deletions(-) diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index 8b7f7d94eed..ed64a900ab7 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -135,34 +135,14 @@ jobs: RCSK FastTest: needs: DockerHubPush - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/fasttest - REPO_COPY=${{runner.temp}}/fasttest/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - EOF - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.TEMP_PATH }} - - name: Fast Test - run: | - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 fast_test_check.py - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Fast tests + runner_type: builder + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 fast_test_check.py CompatibilityCheckX86: needs: [BuilderDebRelease] runs-on: [self-hosted, style-checker] diff --git a/tests/ci/fast_test_check.py b/tests/ci/fast_test_check.py index 
3e7f9debb4c..ee046d6dfef 100644 --- a/tests/ci/fast_test_check.py +++ b/tests/ci/fast_test_check.py @@ -24,7 +24,7 @@ from commit_status_helper import ( format_description, ) from docker_pull_helper import get_image_with_version, DockerImage -from env_helper import S3_BUILDS_BUCKET, TEMP_PATH, REPO_COPY +from env_helper import S3_BUILDS_BUCKET, TEMP_PATH, REPO_COPY, REPORTS_PATH from get_robot_token import get_best_robot_token from pr_info import FORCE_TESTS_LABEL, PRInfo from report import TestResult, TestResults, read_test_results @@ -117,8 +117,9 @@ def main(): args = parse_args() temp_path = Path(TEMP_PATH) - temp_path.mkdir(parents=True, exist_ok=True) + reports_path = Path(REPORTS_PATH) + reports_path.mkdir(parents=True, exist_ok=True) pr_info = PRInfo() @@ -135,7 +136,7 @@ def main(): sys.exit(1) sys.exit(0) - docker_image = get_image_with_version(temp_path, "clickhouse/fasttest") + docker_image = get_image_with_version(reports_path, "clickhouse/fasttest") s3_helper = S3Helper() From 6e00e14a45e485ae75d519b908f411e4faec1622 Mon Sep 17 00:00:00 2001 From: "Mikhail f. 
Shiryaev" Date: Sat, 11 Nov 2023 00:24:14 +0100 Subject: [PATCH 70/99] Rewrite libfuzzer tests to reusable workflow Update fuzzers clang version --- .github/workflows/libfuzzer.yml | 94 +++++---------------------------- tests/ci/ci_config.py | 2 +- 2 files changed, 14 insertions(+), 82 deletions(-) diff --git a/.github/workflows/libfuzzer.yml b/.github/workflows/libfuzzer.yml index e8a0396684a..aabf6275c05 100644 --- a/.github/workflows/libfuzzer.yml +++ b/.github/workflows/libfuzzer.yml @@ -10,86 +10,18 @@ on: # yamllint disable-line rule:truthy workflow_call: jobs: BuilderFuzzers: - runs-on: [self-hosted, builder] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/build_check - IMAGES_PATH=${{runner.temp}}/images_path - REPO_COPY=${{runner.temp}}/build_check/ClickHouse - CACHES_PATH=${{runner.temp}}/../ccaches - BUILD_NAME=fuzzers - EOF - - name: Download changed images - # even if artifact does not exist, e.g. on `do not test` label or failed Docker job - continue-on-error: true - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.IMAGES_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: true - ref: ${{github.ref}} - - name: Build - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME" - - name: Upload build URLs to artifacts - if: ${{ success() || failure() }} - uses: actions/upload-artifact@v3 - with: - name: ${{ env.BUILD_URLS }} - path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" + uses: ./.github/workflows/reusable_build.yml + with: + build_name: fuzzers libFuzzerTest: needs: 
[BuilderFuzzers] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/libfuzzer - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=libFuzzer tests - REPO_COPY=${{runner.temp}}/libfuzzer/ClickHouse - KILL_TIMEOUT=10800 - EOF - - name: Download changed images - # even if artifact does not exist, e.g. on `do not test` label or failed Docker job - continue-on-error: true - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.TEMP_PATH }} - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: libFuzzer test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 libfuzzer_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: libFuzzer tests + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 libfuzzer_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" diff --git a/tests/ci/ci_config.py b/tests/ci/ci_config.py index 6ba3e0992d4..d1d2021753e 100644 --- a/tests/ci/ci_config.py +++ b/tests/ci/ci_config.py @@ -222,7 +222,7 @@ CI_CONFIG = CiConfig( ), "fuzzers": BuildConfig( name="fuzzers", - compiler="clang-16", + compiler="clang-17", package_type="fuzzers", ), }, From 15d27d5e859d66fce8d77cfa3ba1d4d4636511aa Mon Sep 17 00:00:00 2001 From: "Mikhail f. 
Shiryaev" Date: Sat, 11 Nov 2023 00:29:57 +0100 Subject: [PATCH 71/99] Rewrite docs test to reusable workflow --- .github/workflows/docs_check.yml | 37 ++++++++------------------------ tests/ci/docs_check.py | 6 ++++-- 2 files changed, 13 insertions(+), 30 deletions(-) diff --git a/.github/workflows/docs_check.yml b/.github/workflows/docs_check.yml index c311679c8a3..d7699f0419d 100644 --- a/.github/workflows/docs_check.yml +++ b/.github/workflows/docs_check.yml @@ -113,34 +113,15 @@ jobs: RCSK DocsCheck: needs: DockerHubPush - runs-on: [self-hosted, func-tester-aarch64] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/docs_check - REPO_COPY=${{runner.temp}}/docs_check/ClickHouse - EOF - - name: Download changed images - uses: actions/download-artifact@v3 - with: - name: changed_images - path: ${{ env.TEMP_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Docs Check - run: | - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 docs_check.py - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Docs check + runner_type: func-tester-aarch64 + additional_envs: | + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 docs_check.py FinishCheck: needs: - StyleCheck diff --git a/tests/ci/docs_check.py b/tests/ci/docs_check.py index f7339d59a5e..650ed93aa71 100644 --- a/tests/ci/docs_check.py +++ b/tests/ci/docs_check.py @@ -17,7 +17,7 @@ from commit_status_helper import ( update_mergeable_check, ) from docker_pull_helper import get_image_with_version -from env_helper import TEMP_PATH, REPO_COPY +from env_helper import TEMP_PATH, REPO_COPY, REPORTS_PATH from get_robot_token import get_best_robot_token from 
pr_info import PRInfo from report import TestResults, TestResult @@ -57,6 +57,8 @@ def main(): temp_path = Path(TEMP_PATH) temp_path.mkdir(parents=True, exist_ok=True) + reports_path = Path(REPORTS_PATH) + reports_path.mkdir(parents=True, exist_ok=True) repo_path = Path(REPO_COPY) pr_info = PRInfo(need_changed_files=True) @@ -82,7 +84,7 @@ def main(): elif args.force: logging.info("Check the docs because of force flag") - docker_image = get_image_with_version(temp_path, "clickhouse/docs-builder") + docker_image = get_image_with_version(reports_path, "clickhouse/docs-builder") test_output = temp_path / "docs_check_log" test_output.mkdir(parents=True, exist_ok=True) From 9937d0d5b8fd20747b627f2ff13e65f6a9712338 Mon Sep 17 00:00:00 2001 From: "Mikhail f. Shiryaev" Date: Sat, 11 Nov 2023 00:58:04 +0100 Subject: [PATCH 72/99] Rewrite jepsen to reusable workflow --- .github/workflows/jepsen.yml | 71 ++++++++---------------------------- 1 file changed, 16 insertions(+), 55 deletions(-) diff --git a/.github/workflows/jepsen.yml b/.github/workflows/jepsen.yml index 7f1fd16aa89..5ec038231ec 100644 --- a/.github/workflows/jepsen.yml +++ b/.github/workflows/jepsen.yml @@ -11,60 +11,21 @@ on: # yamllint disable-line rule:truthy workflow_call: jobs: KeeperJepsenRelease: - runs-on: [self-hosted, style-checker] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/keeper_jepsen - REPO_COPY=${{runner.temp}}/keeper_jepsen/ClickHouse - EOF - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - fetch-depth: 0 - filter: tree:0 - - name: Jepsen Test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 jepsen_check.py keeper - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr 
"$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Jepsen keeper check + runner_type: style-checker + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 jepsen_check.py keeper # ServerJepsenRelease: # runs-on: [self-hosted, style-checker] - # if: ${{ always() }} - # needs: [KeeperJepsenRelease] - # steps: - # - name: Set envs - # run: | - # cat >> "$GITHUB_ENV" << 'EOF' - # TEMP_PATH=${{runner.temp}}/server_jepsen - # REPO_COPY=${{runner.temp}}/server_jepsen/ClickHouse - # EOF - # - name: Check out repository code - # uses: ClickHouse/checkout@v1 - # with: - # clear-repository: true - # fetch-depth: 0 - # filter: tree:0 - # - name: Jepsen Test - # run: | - # sudo rm -fr "$TEMP_PATH" - # mkdir -p "$TEMP_PATH" - # cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - # cd "$REPO_COPY/tests/ci" - # python3 jepsen_check.py server - # - name: Cleanup - # if: always() - # run: | - # docker ps --quiet | xargs --no-run-if-empty docker kill ||: - # docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - # sudo rm -fr "$TEMP_PATH" + # uses: ./.github/workflows/reusable_test.yml + # with: + # test_name: Jepsen server check + # runner_type: style-checker + # batches: 1 + # run_command: | + # cd "$REPO_COPY/tests/ci" + # python3 jepsen_check.py server From b90a27af8605311432309a23abc2ea5ff8ba2faf Mon Sep 17 00:00:00 2001 From: "Mikhail f. 
Shiryaev" Date: Sat, 11 Nov 2023 01:02:18 +0100 Subject: [PATCH 73/99] Rewrite compatibility checks to reusable workflow --- .github/workflows/backport_branches.yml | 72 +++++-------------------- .github/workflows/master.yml | 72 +++++-------------------- .github/workflows/pull_request.yml | 72 +++++-------------------- .github/workflows/release_branches.yml | 72 +++++-------------------- 4 files changed, 56 insertions(+), 232 deletions(-) diff --git a/.github/workflows/backport_branches.yml b/.github/workflows/backport_branches.yml index ddf9ae1d384..1f0e424bf5c 100644 --- a/.github/workflows/backport_branches.yml +++ b/.github/workflows/backport_branches.yml @@ -105,66 +105,22 @@ jobs: path: ${{ runner.temp }}/changed_images.json CompatibilityCheckX86: needs: [BuilderDebRelease] - runs-on: [self-hosted, style-checker] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/compatibility_check - REPO_COPY=${{runner.temp}}/compatibility_check/ClickHouse - REPORTS_PATH=${{runner.temp}}/reports_dir - EOF - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: CompatibilityCheckX86 - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 compatibility_check.py --check-name "Compatibility check (amd64)" --check-glibc --check-distributions - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Compatibility check X86 + runner_type: style-checker + run_command: | + cd "$REPO_COPY/tests/ci" + python3 compatibility_check.py --check-name "Compatibility check (amd64)" --check-glibc 
--check-distributions CompatibilityCheckAarch64: needs: [BuilderDebAarch64] - runs-on: [self-hosted, style-checker] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/compatibility_check - REPO_COPY=${{runner.temp}}/compatibility_check/ClickHouse - REPORTS_PATH=${{runner.temp}}/reports_dir - EOF - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: CompatibilityCheckAarch64 - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 compatibility_check.py --check-name "Compatibility check (aarch64)" --check-glibc - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Compatibility check X86 + runner_type: style-checker + run_command: | + cd "$REPO_COPY/tests/ci" + python3 compatibility_check.py --check-name "Compatibility check (aarch64)" --check-glibc ######################################################################################### #################################### ORDINARY BUILDS #################################### ######################################################################################### diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml index df56084e4e0..0d232dc8f06 100644 --- a/.github/workflows/master.yml +++ b/.github/workflows/master.yml @@ -100,66 +100,22 @@ jobs: python3 style_check.py CompatibilityCheckX86: needs: [BuilderDebRelease] - runs-on: [self-hosted, style-checker] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/compatibility_check - 
REPO_COPY=${{runner.temp}}/compatibility_check/ClickHouse - REPORTS_PATH=${{runner.temp}}/reports_dir - EOF - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: CompatibilityCheckX86 - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 compatibility_check.py --check-name "Compatibility check (amd64)" --check-glibc --check-distributions - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Compatibility check X86 + runner_type: style-checker + run_command: | + cd "$REPO_COPY/tests/ci" + python3 compatibility_check.py --check-name "Compatibility check (amd64)" --check-glibc --check-distributions CompatibilityCheckAarch64: needs: [BuilderDebAarch64] - runs-on: [self-hosted, style-checker] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/compatibility_check - REPO_COPY=${{runner.temp}}/compatibility_check/ClickHouse - REPORTS_PATH=${{runner.temp}}/reports_dir - EOF - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: CompatibilityCheckAarch64 - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 compatibility_check.py --check-name "Compatibility check (aarch64)" --check-glibc - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs 
--no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Compatibility check X86 + runner_type: style-checker + run_command: | + cd "$REPO_COPY/tests/ci" + python3 compatibility_check.py --check-name "Compatibility check (aarch64)" --check-glibc ######################################################################################### #################################### ORDINARY BUILDS #################################### ######################################################################################### diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index ed64a900ab7..aa104fd3874 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -145,66 +145,22 @@ jobs: python3 fast_test_check.py CompatibilityCheckX86: needs: [BuilderDebRelease] - runs-on: [self-hosted, style-checker] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/compatibility_check - REPO_COPY=${{runner.temp}}/compatibility_check/ClickHouse - REPORTS_PATH=${{runner.temp}}/reports_dir - EOF - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: CompatibilityCheckX86 - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 compatibility_check.py --check-name "Compatibility check (amd64)" --check-glibc --check-distributions - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Compatibility check X86 + runner_type: style-checker + run_command: | + cd 
"$REPO_COPY/tests/ci" + python3 compatibility_check.py --check-name "Compatibility check (amd64)" --check-glibc --check-distributions CompatibilityCheckAarch64: needs: [BuilderDebAarch64] - runs-on: [self-hosted, style-checker] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/compatibility_check - REPO_COPY=${{runner.temp}}/compatibility_check/ClickHouse - REPORTS_PATH=${{runner.temp}}/reports_dir - EOF - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: CompatibilityCheckAarch64 - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 compatibility_check.py --check-name "Compatibility check (aarch64)" --check-glibc - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Compatibility check X86 + runner_type: style-checker + run_command: | + cd "$REPO_COPY/tests/ci" + python3 compatibility_check.py --check-name "Compatibility check (aarch64)" --check-glibc ######################################################################################### #################################### ORDINARY BUILDS #################################### ######################################################################################### diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index a344f23b2c0..e78740a6564 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -76,66 +76,22 @@ jobs: path: ${{ runner.temp }}/changed_images.json CompatibilityCheckX86: needs: [BuilderDebRelease] - 
runs-on: [self-hosted, style-checker] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/compatibility_check - REPO_COPY=${{runner.temp}}/compatibility_check/ClickHouse - REPORTS_PATH=${{runner.temp}}/reports_dir - EOF - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: CompatibilityCheckX86 - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 compatibility_check.py --check-name "Compatibility check (amd64)" --check-glibc --check-distributions - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Compatibility check X86 + runner_type: style-checker + run_command: | + cd "$REPO_COPY/tests/ci" + python3 compatibility_check.py --check-name "Compatibility check (amd64)" --check-glibc --check-distributions CompatibilityCheckAarch64: needs: [BuilderDebAarch64] - runs-on: [self-hosted, style-checker] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/compatibility_check - REPO_COPY=${{runner.temp}}/compatibility_check/ClickHouse - REPORTS_PATH=${{runner.temp}}/reports_dir - EOF - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: CompatibilityCheckAarch64 - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 compatibility_check.py --check-name "Compatibility check (aarch64)" 
--check-glibc - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Compatibility check Aarch64 + runner_type: style-checker + run_command: | + cd "$REPO_COPY/tests/ci" + python3 compatibility_check.py --check-name "Compatibility check (aarch64)" --check-glibc ######################################################################################### #################################### ORDINARY BUILDS #################################### ######################################################################################### From 7d3440add56a7b74a78437562dda9ec5b1a54190 Mon Sep 17 00:00:00 2001 From: "Mikhail f. Shiryaev" Date: Sat, 11 Nov 2023 01:06:13 +0100 Subject: [PATCH 74/99] Delete codebrowser leftovers --- .github/workflows/nightly.yml | 3 - .github/workflows/woboq.yml | 44 ---------- tests/ci/codebrowser_check.py | 150 ---------------------------------- 3 files changed, 197 deletions(-) delete mode 100644 .github/workflows/woboq.yml delete mode 100644 tests/ci/codebrowser_check.py diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index 6452b83fdd6..1e94f70b9e6 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -74,9 +74,6 @@ jobs: with: name: changed_images path: ${{ runner.temp }}/changed_images.json - Codebrowser: - needs: [DockerHubPush] - uses: ./.github/workflows/woboq.yml SonarCloud: runs-on: [self-hosted, builder] env: diff --git a/.github/workflows/woboq.yml b/.github/workflows/woboq.yml deleted file mode 100644 index 1ef729af30a..00000000000 --- a/.github/workflows/woboq.yml +++ /dev/null @@ -1,44 +0,0 @@ -name: WoboqBuilder -env: - # Force the stdout and stderr streams to be unbuffered - PYTHONUNBUFFERED: 1 - -concurrency: - group: woboq -on: # yamllint disable-line rule:truthy - 
workflow_dispatch: - workflow_call: -jobs: - # don't use dockerhub push because this image updates so rarely - WoboqCodebrowser: - runs-on: [self-hosted, style-checker] - timeout-minutes: 420 # the task is pretty heavy, so there's an additional hour - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/codebrowser - REPO_COPY=${{runner.temp}}/codebrowser/ClickHouse - IMAGES_PATH=${{runner.temp}}/images_path - EOF - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - submodules: 'true' - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.IMAGES_PATH }} - - name: Codebrowser - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" && python3 codebrowser_check.py - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" diff --git a/tests/ci/codebrowser_check.py b/tests/ci/codebrowser_check.py deleted file mode 100644 index a3414156bba..00000000000 --- a/tests/ci/codebrowser_check.py +++ /dev/null @@ -1,150 +0,0 @@ -#!/usr/bin/env python3 - - -import logging -import os -from pathlib import Path - -from github import Github - -from commit_status_helper import get_commit, post_commit_status -from docker_pull_helper import get_image_with_version, DockerImage -from env_helper import ( - IMAGES_PATH, - REPO_COPY, - S3_DOWNLOAD, - S3_BUILDS_BUCKET, - S3_TEST_REPORTS_BUCKET, - TEMP_PATH, -) -from get_robot_token import get_best_robot_token -from pr_info import PRInfo -from report import TestResult -from s3_helper import S3Helper -from stopwatch import Stopwatch -from tee_popen import TeePopen -from upload_result_helper import upload_results - -NAME = "Woboq Build" - - -def get_run_command( - repo_path: Path, output_path: Path, image: 
DockerImage, sha: str -) -> str: - user = f"{os.geteuid()}:{os.getegid()}" - cmd = ( - f"docker run --rm --user={user} --volume={repo_path}:/build " - f"--volume={output_path}:/workdir/output --network=host " - # use sccache, https://github.com/KDAB/codebrowser/issues/111 - f"-e SCCACHE_BUCKET='{S3_BUILDS_BUCKET}' " - "-e SCCACHE_S3_KEY_PREFIX=ccache/sccache " - '-e CMAKE_FLAGS="$CMAKE_FLAGS -DCOMPILER_CACHE=sccache" ' - f"-e 'DATA={S3_DOWNLOAD}/{S3_TEST_REPORTS_BUCKET}/codebrowser/data' " - f"-e SHA={sha} {image}" - ) - return cmd - - -def main(): - logging.basicConfig(level=logging.INFO) - - stopwatch = Stopwatch() - - gh = Github(get_best_robot_token(), per_page=100) - pr_info = PRInfo() - commit = get_commit(gh, pr_info.sha) - temp_path = Path(TEMP_PATH) - - if not temp_path.exists(): - os.makedirs(temp_path) - - docker_image = get_image_with_version(IMAGES_PATH, "clickhouse/codebrowser") - # FIXME: the codebrowser is broken with clang-16, workaround with clang-15 - # See https://github.com/ClickHouse/ClickHouse/issues/50077 - docker_image.version = "49701-4dcdcf4c11b5604f1c5d3121c9c6fea3e957b605" - s3_helper = S3Helper() - - result_path = temp_path / "result_path" - if not result_path.exists(): - os.makedirs(result_path) - - run_command = get_run_command( - Path(REPO_COPY), result_path, docker_image, pr_info.sha[:12] - ) - - logging.info("Going to run codebrowser: %s", run_command) - - run_log_path = result_path / "run.log" - - state = "success" - with TeePopen(run_command, run_log_path) as process: - retcode = process.wait() - if retcode == 0: - logging.info("Run successfully") - else: - logging.info("Run failed") - state = "failure" - - report_path = result_path / "html_report" - logging.info("Report path %s", report_path) - - s3_path_prefix = "codebrowser" - index_template = ( - f'' - "{}" - ) - additional_logs = [path.absolute() for path in result_path.glob("*.log")] - test_results = [ - TestResult( - index_template.format("Generate codebrowser site"), - 
state, - stopwatch.duration_seconds, - additional_logs, - ) - ] - - if state == "success": - stopwatch.reset() - _ = s3_helper.fast_parallel_upload_dir( - report_path, s3_path_prefix, S3_TEST_REPORTS_BUCKET - ) - test_results.append( - TestResult( - index_template.format("Upload codebrowser site"), - state, - stopwatch.duration_seconds, - ) - ) - - # Check if the run log contains `FATAL Error:`, that means the code problem - stopwatch.reset() - fatal_error = "FATAL Error:" - logging.info("Search for '%s' in %s", fatal_error, run_log_path) - with open(run_log_path, "r", encoding="utf-8") as rlfd: - for line in rlfd.readlines(): - if "FATAL Error:" in line: - logging.warning( - "The line '%s' found, mark the run as failure", fatal_error - ) - state = "failure" - test_results.append( - TestResult( - "Indexing error", - state, - stopwatch.duration_seconds, - additional_logs, - ) - ) - break - - report_url = upload_results( - s3_helper, pr_info.number, pr_info.sha, test_results, [], NAME - ) - - print(f"::notice ::Report url: {report_url}") - - post_commit_status(commit, state, report_url, "Report built", NAME, pr_info) - - -if __name__ == "__main__": - main() From cdf417defec42314b0078cbae7bd7b159e8a887b Mon Sep 17 00:00:00 2001 From: "Mikhail f. 
Shiryaev" Date: Sat, 11 Nov 2023 01:11:26 +0100 Subject: [PATCH 75/99] Rewrite unit tests to reusable workflow --- .github/workflows/master.yml | 195 ++++++----------------------- .github/workflows/pull_request.yml | 195 ++++++----------------------- 2 files changed, 80 insertions(+), 310 deletions(-) diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml index 0d232dc8f06..22d4371b0f9 100644 --- a/.github/workflows/master.yml +++ b/.github/workflows/master.yml @@ -759,169 +759,54 @@ jobs: ############################################################################################# UnitTestsAsan: needs: [BuilderDebAsan] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/unit_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Unit tests (asan) - REPO_COPY=${{runner.temp}}/unit_tests_asan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Unit test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 unit_tests_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Unit tests (asan) + runner_type: fuzzer-unit-tester + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 unit_tests_check.py "$CHECK_NAME" UnitTestsReleaseClang: needs: [BuilderBinRelease] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/unit_tests_asan - 
REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Unit tests (release) - REPO_COPY=${{runner.temp}}/unit_tests_asan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Unit test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 unit_tests_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Unit tests (release) + runner_type: fuzzer-unit-tester + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 unit_tests_check.py "$CHECK_NAME" UnitTestsTsan: needs: [BuilderDebTsan] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/unit_tests_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Unit tests (tsan) - REPO_COPY=${{runner.temp}}/unit_tests_tsan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Unit test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 unit_tests_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Unit tests (tsan) + runner_type: 
fuzzer-unit-tester + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 unit_tests_check.py "$CHECK_NAME" UnitTestsMsan: needs: [BuilderDebMsan] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/unit_tests_msan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Unit tests (msan) - REPO_COPY=${{runner.temp}}/unit_tests_msan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Unit test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 unit_tests_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Unit tests (msan) + runner_type: fuzzer-unit-tester + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 unit_tests_check.py "$CHECK_NAME" UnitTestsUBsan: needs: [BuilderDebUBsan] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/unit_tests_ubsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Unit tests (ubsan) - REPO_COPY=${{runner.temp}}/unit_tests_ubsan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Unit test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 
unit_tests_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Unit tests (ubsan) + runner_type: fuzzer-unit-tester + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 unit_tests_check.py "$CHECK_NAME" ############################################################################################# #################################### PERFORMANCE TESTS ###################################### ############################################################################################# diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index aa104fd3874..6c703ac1518 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -962,169 +962,54 @@ jobs: ############################################################################################# UnitTestsAsan: needs: [BuilderDebAsan] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/unit_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Unit tests (asan) - REPO_COPY=${{runner.temp}}/unit_tests_asan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Unit test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 unit_tests_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr 
"$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Unit tests (asan) + runner_type: fuzzer-unit-tester + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 unit_tests_check.py "$CHECK_NAME" UnitTestsReleaseClang: needs: [BuilderBinRelease] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/unit_tests_asan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Unit tests (release) - REPO_COPY=${{runner.temp}}/unit_tests_asan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Unit test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 unit_tests_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Unit tests (release) + runner_type: fuzzer-unit-tester + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 unit_tests_check.py "$CHECK_NAME" UnitTestsTsan: needs: [BuilderDebTsan] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/unit_tests_tsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Unit tests (tsan) - REPO_COPY=${{runner.temp}}/unit_tests_tsan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Unit test - run: | - sudo rm -fr 
"$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 unit_tests_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Unit tests (tsan) + runner_type: fuzzer-unit-tester + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 unit_tests_check.py "$CHECK_NAME" UnitTestsMsan: needs: [BuilderDebMsan] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/unit_tests_msan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Unit tests (msan) - REPO_COPY=${{runner.temp}}/unit_tests_msan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Unit test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 unit_tests_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Unit tests (msan) + runner_type: fuzzer-unit-tester + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 unit_tests_check.py "$CHECK_NAME" UnitTestsUBsan: needs: [BuilderDebUBsan] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/unit_tests_ubsan - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Unit tests 
(ubsan) - REPO_COPY=${{runner.temp}}/unit_tests_ubsan/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Unit test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 unit_tests_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Unit tests (ubsan) + runner_type: fuzzer-unit-tester + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 unit_tests_check.py "$CHECK_NAME" ############################################################################################# #################################### PERFORMANCE TESTS ###################################### ############################################################################################# From 111bc9e6288e334aa93e16489898947cd13cd515 Mon Sep 17 00:00:00 2001 From: "Mikhail f. 
Shiryaev" Date: Sat, 11 Nov 2023 01:20:02 +0100 Subject: [PATCH 76/99] Rewrite leftovers to reusable workflow --- .github/workflows/master.yml | 78 +++-------- .github/workflows/pull_request.yml | 202 ++++++++--------------------- 2 files changed, 72 insertions(+), 208 deletions(-) diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml index 22d4371b0f9..8466c749933 100644 --- a/.github/workflows/master.yml +++ b/.github/workflows/master.yml @@ -835,70 +835,24 @@ jobs: ############################################################################################## SQLancerTestRelease: needs: [BuilderDebRelease] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/sqlancer_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=SQLancer (release) - REPO_COPY=${{runner.temp}}/sqlancer_release/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: SQLancer - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 sqlancer_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: SQLancer (release) + runner_type: fuzzer-unit-tester + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 sqlancer_check.py "$CHECK_NAME" SQLancerTestDebug: needs: [BuilderDebDebug] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/sqlancer_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - 
CHECK_NAME=SQLancer (debug) - REPO_COPY=${{runner.temp}}/sqlancer_debug/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: SQLancer - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 sqlancer_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: SQLancer (debug) + runner_type: fuzzer-unit-tester + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 sqlancer_check.py "$CHECK_NAME" FinishCheck: needs: - DockerHubPush diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index 6c703ac1518..76cd9248de8 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -1038,80 +1038,24 @@ jobs: ############################################################################################## SQLancerTestRelease: needs: [BuilderDebRelease] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/sqlancer_release - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=SQLancer (release) - REPO_COPY=${{runner.temp}}/sqlancer_release/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: SQLancer - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 
sqlancer_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: SQLancer (release) + runner_type: fuzzer-unit-tester + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 sqlancer_check.py "$CHECK_NAME" SQLancerTestDebug: needs: [BuilderDebDebug] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/sqlancer_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=SQLancer (debug) - REPO_COPY=${{runner.temp}}/sqlancer_debug/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: SQLancer - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 sqlancer_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" -############################################################################################# -###################################### JEPSEN TESTS ######################################### -############################################################################################# - Jepsen: - # This is special test NOT INCLUDED in FinishCheck - # When it's skipped, all dependent tasks will be skipped too. 
- # DO NOT add it there - if: contains(github.event.pull_request.labels.*.name, 'jepsen-test') - needs: [BuilderBinRelease] - uses: ./.github/workflows/jepsen.yml + uses: ./.github/workflows/reusable_test.yml + with: + test_name: SQLancer (debug) + runner_type: fuzzer-unit-tester + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 sqlancer_check.py "$CHECK_NAME" FinishCheck: needs: - StyleCheck @@ -1188,6 +1132,46 @@ jobs: cd "$GITHUB_WORKSPACE/tests/ci" python3 finish_check.py python3 merge_pr.py --check-approved +############################################################################################## +############################ SQLLOGIC TEST ################################################### +############################################################################################## + SQLLogicTestRelease: + needs: [BuilderDebRelease] + uses: ./.github/workflows/reusable_test.yml + with: + test_name: Sqllogic test (release) + runner_type: func-tester + additional_envs: | + KILL_TIMEOUT=10800 + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 sqllogic_test.py "$CHECK_NAME" "$KILL_TIMEOUT" +############################################################################################## +##################################### SQL TEST ############################################### +############################################################################################## + SQLTest: + needs: [BuilderDebRelease] + uses: ./.github/workflows/reusable_test.yml + with: + test_name: SQLTest + runner_type: fuzzer-unit-tester + batches: 1 + run_command: | + cd "$REPO_COPY/tests/ci" + python3 sqltest.py "$CHECK_NAME" +############################################################################################# +###################################### NOT IN FINISH ######################################## +############################################################################################# 
+###################################### JEPSEN TESTS ######################################### +############################################################################################# + Jepsen: + # This is special test NOT INCLUDED in FinishCheck + # When it's skipped, all dependent tasks will be skipped too. + # DO NOT add it there + if: contains(github.event.pull_request.labels.*.name, 'jepsen-test') + needs: [BuilderBinRelease] + uses: ./.github/workflows/jepsen.yml ############################################################################################# ####################################### libFuzzer ########################################### ############################################################################################# @@ -1195,77 +1179,3 @@ jobs: if: contains(github.event.pull_request.labels.*.name, 'libFuzzer') needs: [DockerHubPush, StyleCheck] uses: ./.github/workflows/libfuzzer.yml - ############################################################################################## - ############################ SQLLOGIC TEST ################################################### - ############################################################################################## - SQLLogicTestRelease: - needs: [BuilderDebRelease] - runs-on: [self-hosted, func-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/sqllogic_debug - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=Sqllogic test (release) - REPO_COPY=${{runner.temp}}/sqllogic_debug/ClickHouse - KILL_TIMEOUT=10800 - EOF - - name: Download json reports - uses: actions/download-artifact@v2 - with: - path: ${{ env.REPORTS_PATH }} - - name: Clear repository - run: | - sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE" - - name: Check out repository code - uses: actions/checkout@v2 - - name: Sqllogic test - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd 
"$REPO_COPY/tests/ci" - python3 sqllogic_test.py "$CHECK_NAME" "$KILL_TIMEOUT" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" -############################################################################################## -##################################### SQL TEST ############################################### -############################################################################################## - SQLTest: - needs: [BuilderDebRelease] - runs-on: [self-hosted, fuzzer-unit-tester] - steps: - - name: Set envs - run: | - cat >> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/sqltest - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=SQLTest - REPO_COPY=${{runner.temp}}/sqltest/ClickHouse - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: SQLTest - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH" - cd "$REPO_COPY/tests/ci" - python3 sqltest.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" From 025c3cf37b0dba689602149da5cfefbfe26ae961 Mon Sep 17 00:00:00 2001 From: "Mikhail f. 
Shiryaev" Date: Sat, 11 Nov 2023 01:27:09 +0100 Subject: [PATCH 77/99] Trigger docs check --- docker/docs/builder/Dockerfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docker/docs/builder/Dockerfile b/docker/docs/builder/Dockerfile index 3ca2bdafcb3..b7b706a8a5c 100644 --- a/docker/docs/builder/Dockerfile +++ b/docker/docs/builder/Dockerfile @@ -4,8 +4,8 @@ FROM node:16-alpine RUN apk add --no-cache git openssh bash -# At this point we want to really update /opt/clickhouse-docs -# despite the cached images +# At this point we want to really update /opt/clickhouse-docs directory +# So we reset the cache ARG CACHE_INVALIDATOR=0 RUN git clone https://github.com/ClickHouse/clickhouse-docs.git \ From 8a36f2fa4caee42a382a1f9d7e3419675348d68a Mon Sep 17 00:00:00 2001 From: "Mikhail f. Shiryaev" Date: Sun, 12 Nov 2023 10:12:04 +0100 Subject: [PATCH 78/99] Rewrite builds reports to reusable workflow --- .github/workflows/backport_branches.yml | 92 +++++++----------------- .github/workflows/master.yml | 93 +++++++------------------ .github/workflows/pull_request.yml | 92 +++++++----------------- .github/workflows/release_branches.yml | 93 +++++++------------------ tests/ci/build_report_check.py | 9 ++- 5 files changed, 104 insertions(+), 275 deletions(-) diff --git a/.github/workflows/backport_branches.yml b/.github/workflows/backport_branches.yml index 1f0e424bf5c..05ade29dfa6 100644 --- a/.github/workflows/backport_branches.yml +++ b/.github/workflows/backport_branches.yml @@ -195,84 +195,40 @@ jobs: ##################################### BUILD REPORTER ####################################### ############################################################################################ BuilderReport: + if: ${{ success() || failure() }} needs: - BuilderDebRelease - BuilderDebAarch64 - BuilderDebAsan - BuilderDebTsan - BuilderDebDebug - runs-on: [self-hosted, style-checker] - if: ${{ success() || failure() }} - steps: - - name: Set envs - run: | - cat 
>> "$GITHUB_ENV" << 'EOF' - CHECK_NAME=ClickHouse build check - REPORTS_PATH=${{runner.temp}}/reports_dir - TEMP_PATH=${{runner.temp}}/report_check - NEEDS_DATA_PATH=${{runner.temp}}/needs.json - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Report Builder - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cat > "$NEEDS_DATA_PATH" << 'EOF' - ${{ toJSON(needs) }} - EOF - cd "$GITHUB_WORKSPACE/tests/ci" - python3 build_report_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: ClickHouse build check + runner_type: style-checker + additional_envs: | + NEEDS_DATA<> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/report_check - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=ClickHouse special build check - NEEDS_DATA_PATH=${{runner.temp}}/needs.json - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Report Builder - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cat > "$NEEDS_DATA_PATH" << 'EOF' - ${{ toJSON(needs) }} - EOF - cd "$GITHUB_WORKSPACE/tests/ci" - python3 build_report_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: ClickHouse special build check + runner_type: style-checker + additional_envs: | + 
NEEDS_DATA<> "$GITHUB_ENV" << 'EOF' - CHECK_NAME=ClickHouse build check - REPORTS_PATH=${{runner.temp}}/reports_dir - REPORTS_PATH=${{runner.temp}}/reports_dir - TEMP_PATH=${{runner.temp}}/report_check - NEEDS_DATA_PATH=${{runner.temp}}/needs.json - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Report Builder - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cat > "$NEEDS_DATA_PATH" << 'EOF' - ${{ toJSON(needs) }} - EOF - cd "$GITHUB_WORKSPACE/tests/ci" - python3 build_report_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: ClickHouse build check + runner_type: style-checker + additional_envs: | + NEEDS_DATA<> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/report_check - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=ClickHouse special build check - NEEDS_DATA_PATH=${{runner.temp}}/needs.json - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Report Builder - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cat > "$NEEDS_DATA_PATH" << 'EOF' - ${{ toJSON(needs) }} - EOF - cd "$GITHUB_WORKSPACE/tests/ci" - python3 build_report_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: ClickHouse special build check + 
runner_type: style-checker + additional_envs: | + NEEDS_DATA<> "$GITHUB_ENV" << 'EOF' - CHECK_NAME=ClickHouse build check - REPORTS_PATH=${{runner.temp}}/reports_dir - TEMP_PATH=${{runner.temp}}/report_check - NEEDS_DATA_PATH=${{runner.temp}}/needs.json - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Report Builder - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cat > "$NEEDS_DATA_PATH" << 'EOF' - ${{ toJSON(needs) }} - EOF - cd "$GITHUB_WORKSPACE/tests/ci" - python3 build_report_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: ClickHouse build check + runner_type: style-checker + additional_envs: | + NEEDS_DATA<> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/report_check - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=ClickHouse special build check - NEEDS_DATA_PATH=${{runner.temp}}/needs.json - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Report Builder - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cat > "$NEEDS_DATA_PATH" << 'EOF' - ${{ toJSON(needs) }} - EOF - cd "$GITHUB_WORKSPACE/tests/ci" - python3 build_report_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: ClickHouse special build check + 
runner_type: style-checker + additional_envs: | + NEEDS_DATA<> "$GITHUB_ENV" << 'EOF' - CHECK_NAME=ClickHouse build check - REPORTS_PATH=${{runner.temp}}/reports_dir - REPORTS_PATH=${{runner.temp}}/reports_dir - TEMP_PATH=${{runner.temp}}/report_check - NEEDS_DATA_PATH=${{runner.temp}}/needs.json - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Report Builder - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cat > "$NEEDS_DATA_PATH" << 'EOF' - ${{ toJSON(needs) }} - EOF - cd "$GITHUB_WORKSPACE/tests/ci" - python3 build_report_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + test_name: ClickHouse build check + runner_type: style-checker + additional_envs: | + NEEDS_DATA<> "$GITHUB_ENV" << 'EOF' - TEMP_PATH=${{runner.temp}}/report_check - REPORTS_PATH=${{runner.temp}}/reports_dir - CHECK_NAME=ClickHouse special build check - NEEDS_DATA_PATH=${{runner.temp}}/needs.json - EOF - - name: Download json reports - uses: actions/download-artifact@v3 - with: - path: ${{ env.REPORTS_PATH }} - - name: Check out repository code - uses: ClickHouse/checkout@v1 - with: - clear-repository: true - - name: Report Builder - run: | - sudo rm -fr "$TEMP_PATH" - mkdir -p "$TEMP_PATH" - cat > "$NEEDS_DATA_PATH" << 'EOF' - ${{ toJSON(needs) }} - EOF - cd "$GITHUB_WORKSPACE/tests/ci" - python3 build_report_check.py "$CHECK_NAME" - - name: Cleanup - if: always() - run: | - docker ps --quiet | xargs --no-run-if-empty docker kill ||: - docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: - sudo rm -fr "$TEMP_PATH" + uses: ./.github/workflows/reusable_test.yml + with: + 
test_name: ClickHouse special build check + runner_type: style-checker + additional_envs: | + NEEDS_DATA< Date: Sun, 12 Nov 2023 10:14:54 +0100 Subject: [PATCH 79/99] Clean all `batches: 1` default values --- .github/workflows/backport_branches.yml | 3 -- .github/workflows/docs_check.yml | 2 -- .github/workflows/jepsen.yml | 2 -- .github/workflows/libfuzzer.yml | 1 - .github/workflows/master.yml | 29 ---------------- .github/workflows/pull_request.yml | 45 ------------------------- .github/workflows/release_branches.yml | 14 -------- 7 files changed, 96 deletions(-) diff --git a/.github/workflows/backport_branches.yml b/.github/workflows/backport_branches.yml index 05ade29dfa6..f6af4778cf1 100644 --- a/.github/workflows/backport_branches.yml +++ b/.github/workflows/backport_branches.yml @@ -261,7 +261,6 @@ jobs: runner_type: func-tester additional_envs: | KILL_TIMEOUT=10800 - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" @@ -276,7 +275,6 @@ jobs: runner_type: func-tester additional_envs: | KILL_TIMEOUT=3600 - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" @@ -289,7 +287,6 @@ jobs: with: test_name: Stress test (tsan) runner_type: stress-tester - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 stress_check.py "$CHECK_NAME" diff --git a/.github/workflows/docs_check.yml b/.github/workflows/docs_check.yml index d7699f0419d..6d449e74f30 100644 --- a/.github/workflows/docs_check.yml +++ b/.github/workflows/docs_check.yml @@ -102,7 +102,6 @@ jobs: with: test_name: Style check runner_type: style-checker - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 style_check.py @@ -118,7 +117,6 @@ jobs: test_name: Docs check runner_type: func-tester-aarch64 additional_envs: | - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 docs_check.py diff --git a/.github/workflows/jepsen.yml b/.github/workflows/jepsen.yml index 
5ec038231ec..163de7769af 100644 --- a/.github/workflows/jepsen.yml +++ b/.github/workflows/jepsen.yml @@ -15,7 +15,6 @@ jobs: with: test_name: Jepsen keeper check runner_type: style-checker - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 jepsen_check.py keeper @@ -25,7 +24,6 @@ jobs: # with: # test_name: Jepsen server check # runner_type: style-checker - # batches: 1 # run_command: | # cd "$REPO_COPY/tests/ci" # python3 jepsen_check.py server diff --git a/.github/workflows/libfuzzer.yml b/.github/workflows/libfuzzer.yml index aabf6275c05..1ca637c0d84 100644 --- a/.github/workflows/libfuzzer.yml +++ b/.github/workflows/libfuzzer.yml @@ -21,7 +21,6 @@ jobs: runner_type: func-tester additional_envs: | KILL_TIMEOUT=10800 - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 libfuzzer_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml index 50fcfc79572..260fc2fc7d5 100644 --- a/.github/workflows/master.yml +++ b/.github/workflows/master.yml @@ -94,7 +94,6 @@ jobs: with: test_name: Style check runner_type: style-checker - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 style_check.py @@ -349,7 +348,6 @@ jobs: runner_type: func-tester additional_envs: | KILL_TIMEOUT=10800 - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" @@ -361,7 +359,6 @@ jobs: runner_type: func-tester additional_envs: | KILL_TIMEOUT=10800 - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" @@ -397,7 +394,6 @@ jobs: runner_type: func-tester additional_envs: | KILL_TIMEOUT=10800 - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" @@ -409,7 +405,6 @@ jobs: runner_type: func-tester-aarch64 additional_envs: | KILL_TIMEOUT=10800 - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 functional_test_check.py 
"$CHECK_NAME" "$KILL_TIMEOUT" @@ -484,7 +479,6 @@ jobs: runner_type: func-tester additional_envs: | KILL_TIMEOUT=3600 - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" @@ -496,7 +490,6 @@ jobs: runner_type: func-tester-aarch64 additional_envs: | KILL_TIMEOUT=3600 - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" @@ -508,7 +501,6 @@ jobs: runner_type: func-tester additional_envs: | KILL_TIMEOUT=3600 - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" @@ -520,7 +512,6 @@ jobs: runner_type: func-tester additional_envs: | KILL_TIMEOUT=3600 - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" @@ -532,7 +523,6 @@ jobs: runner_type: func-tester additional_envs: | KILL_TIMEOUT=3600 - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" @@ -544,7 +534,6 @@ jobs: runner_type: func-tester additional_envs: | KILL_TIMEOUT=3600 - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" @@ -556,7 +545,6 @@ jobs: runner_type: func-tester additional_envs: | KILL_TIMEOUT=3600 - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" @@ -569,7 +557,6 @@ jobs: with: test_name: Stress test (asan) runner_type: stress-tester - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 stress_check.py "$CHECK_NAME" @@ -579,7 +566,6 @@ jobs: with: test_name: Stress test (tsan) runner_type: stress-tester - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 stress_check.py "$CHECK_NAME" @@ -589,7 +575,6 @@ jobs: with: test_name: Stress test (msan) runner_type: stress-tester - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 stress_check.py 
"$CHECK_NAME" @@ -599,7 +584,6 @@ jobs: with: test_name: Stress test (ubsan) runner_type: stress-tester - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 stress_check.py "$CHECK_NAME" @@ -609,7 +593,6 @@ jobs: with: test_name: Stress test (debug) runner_type: stress-tester - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 stress_check.py "$CHECK_NAME" @@ -665,7 +648,6 @@ jobs: with: test_name: AST fuzzer (asan) runner_type: fuzzer-unit-tester - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 ast_fuzzer_check.py "$CHECK_NAME" @@ -675,7 +657,6 @@ jobs: with: test_name: AST fuzzer (tsan) runner_type: fuzzer-unit-tester - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 ast_fuzzer_check.py "$CHECK_NAME" @@ -685,7 +666,6 @@ jobs: with: test_name: AST fuzzer (ubsan) runner_type: fuzzer-unit-tester - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 ast_fuzzer_check.py "$CHECK_NAME" @@ -695,7 +675,6 @@ jobs: with: test_name: AST fuzzer (msan) runner_type: fuzzer-unit-tester - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 ast_fuzzer_check.py "$CHECK_NAME" @@ -705,7 +684,6 @@ jobs: with: test_name: AST fuzzer (debug) runner_type: fuzzer-unit-tester - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 ast_fuzzer_check.py "$CHECK_NAME" @@ -718,7 +696,6 @@ jobs: with: test_name: Unit tests (asan) runner_type: fuzzer-unit-tester - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 unit_tests_check.py "$CHECK_NAME" @@ -728,7 +705,6 @@ jobs: with: test_name: Unit tests (release) runner_type: fuzzer-unit-tester - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 unit_tests_check.py "$CHECK_NAME" @@ -738,7 +714,6 @@ jobs: with: test_name: Unit tests (tsan) runner_type: fuzzer-unit-tester - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 unit_tests_check.py "$CHECK_NAME" @@ -748,7 +723,6 @@ jobs: with: test_name: Unit tests (msan) runner_type: fuzzer-unit-tester - batches: 1 
run_command: | cd "$REPO_COPY/tests/ci" python3 unit_tests_check.py "$CHECK_NAME" @@ -758,7 +732,6 @@ jobs: with: test_name: Unit tests (ubsan) runner_type: fuzzer-unit-tester - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 unit_tests_check.py "$CHECK_NAME" @@ -794,7 +767,6 @@ jobs: with: test_name: SQLancer (release) runner_type: fuzzer-unit-tester - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 sqlancer_check.py "$CHECK_NAME" @@ -804,7 +776,6 @@ jobs: with: test_name: SQLancer (debug) runner_type: fuzzer-unit-tester - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 sqlancer_check.py "$CHECK_NAME" diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index 1c12ddca07c..f8f052d9226 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -124,7 +124,6 @@ jobs: with: test_name: Style check runner_type: style-checker - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 style_check.py @@ -139,7 +138,6 @@ jobs: with: test_name: Fast tests runner_type: builder - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 fast_test_check.py @@ -368,7 +366,6 @@ jobs: runner_type: func-tester additional_envs: | KILL_TIMEOUT=10800 - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" @@ -392,7 +389,6 @@ jobs: runner_type: func-tester additional_envs: | KILL_TIMEOUT=10800 - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" @@ -404,7 +400,6 @@ jobs: runner_type: func-tester additional_envs: | KILL_TIMEOUT=10800 - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" @@ -452,7 +447,6 @@ jobs: runner_type: func-tester-aarch64 additional_envs: | KILL_TIMEOUT=10800 - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" @@ -524,7 
+518,6 @@ jobs: runner_type: func-tester additional_envs: | KILL_TIMEOUT=3600 - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" @@ -536,7 +529,6 @@ jobs: runner_type: func-tester additional_envs: | KILL_TIMEOUT=3600 - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" @@ -562,7 +554,6 @@ jobs: runner_type: func-tester additional_envs: | KILL_TIMEOUT=3600 - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" @@ -574,7 +565,6 @@ jobs: runner_type: func-tester-aarch64 additional_envs: | KILL_TIMEOUT=3600 - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" @@ -586,7 +576,6 @@ jobs: runner_type: func-tester additional_envs: | KILL_TIMEOUT=3600 - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" @@ -598,7 +587,6 @@ jobs: runner_type: func-tester additional_envs: | KILL_TIMEOUT=3600 - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" @@ -610,7 +598,6 @@ jobs: runner_type: func-tester additional_envs: | KILL_TIMEOUT=3600 - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" @@ -622,7 +609,6 @@ jobs: runner_type: func-tester additional_envs: | KILL_TIMEOUT=3600 - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" @@ -634,7 +620,6 @@ jobs: runner_type: func-tester additional_envs: | KILL_TIMEOUT=3600 - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" @@ -647,7 +632,6 @@ jobs: runner_type: func-tester additional_envs: | KILL_TIMEOUT=3600 - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" @@ -659,7 +643,6 @@ 
jobs: runner_type: func-tester additional_envs: | KILL_TIMEOUT=3600 - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" @@ -671,7 +654,6 @@ jobs: runner_type: func-tester additional_envs: | KILL_TIMEOUT=3600 - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" @@ -683,7 +665,6 @@ jobs: runner_type: func-tester additional_envs: | KILL_TIMEOUT=3600 - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" @@ -695,7 +676,6 @@ jobs: runner_type: func-tester additional_envs: | KILL_TIMEOUT=3600 - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" @@ -707,7 +687,6 @@ jobs: runner_type: func-tester additional_envs: | KILL_TIMEOUT=3600 - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" @@ -720,7 +699,6 @@ jobs: with: test_name: Stress test (asan) runner_type: stress-tester - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 stress_check.py "$CHECK_NAME" @@ -730,7 +708,6 @@ jobs: with: test_name: Stress test (tsan) runner_type: stress-tester - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 stress_check.py "$CHECK_NAME" @@ -740,7 +717,6 @@ jobs: with: test_name: Stress test (msan) runner_type: stress-tester - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 stress_check.py "$CHECK_NAME" @@ -750,7 +726,6 @@ jobs: with: test_name: Stress test (ubsan) runner_type: stress-tester - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 stress_check.py "$CHECK_NAME" @@ -760,7 +735,6 @@ jobs: with: test_name: Stress test (debug) runner_type: stress-tester - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 stress_check.py "$CHECK_NAME" @@ -773,7 +747,6 @@ jobs: with: test_name: Upgrade check (asan) runner_type: stress-tester - 
batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 upgrade_check.py "$CHECK_NAME" @@ -783,7 +756,6 @@ jobs: with: test_name: Upgrade check (tsan) runner_type: stress-tester - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 upgrade_check.py "$CHECK_NAME" @@ -793,7 +765,6 @@ jobs: with: test_name: Upgrade check (msan) runner_type: stress-tester - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 upgrade_check.py "$CHECK_NAME" @@ -803,7 +774,6 @@ jobs: with: test_name: Upgrade check (debug) runner_type: stress-tester - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 upgrade_check.py "$CHECK_NAME" @@ -816,7 +786,6 @@ jobs: with: test_name: AST fuzzer (asan) runner_type: fuzzer-unit-tester - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 ast_fuzzer_check.py "$CHECK_NAME" @@ -826,7 +795,6 @@ jobs: with: test_name: AST fuzzer (tsan) runner_type: fuzzer-unit-tester - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 ast_fuzzer_check.py "$CHECK_NAME" @@ -836,7 +804,6 @@ jobs: with: test_name: AST fuzzer (ubsan) runner_type: fuzzer-unit-tester - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 ast_fuzzer_check.py "$CHECK_NAME" @@ -846,7 +813,6 @@ jobs: with: test_name: AST fuzzer (msan) runner_type: fuzzer-unit-tester - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 ast_fuzzer_check.py "$CHECK_NAME" @@ -856,7 +822,6 @@ jobs: with: test_name: AST fuzzer (debug) runner_type: fuzzer-unit-tester - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 ast_fuzzer_check.py "$CHECK_NAME" @@ -909,7 +874,6 @@ jobs: with: test_name: Integration tests flaky check (asan) runner_type: stress-tester - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 integration_test_check.py "$CHECK_NAME" @@ -922,7 +886,6 @@ jobs: with: test_name: Unit tests (asan) runner_type: fuzzer-unit-tester - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 unit_tests_check.py "$CHECK_NAME" @@ -932,7 +895,6 @@ 
jobs: with: test_name: Unit tests (release) runner_type: fuzzer-unit-tester - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 unit_tests_check.py "$CHECK_NAME" @@ -942,7 +904,6 @@ jobs: with: test_name: Unit tests (tsan) runner_type: fuzzer-unit-tester - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 unit_tests_check.py "$CHECK_NAME" @@ -952,7 +913,6 @@ jobs: with: test_name: Unit tests (msan) runner_type: fuzzer-unit-tester - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 unit_tests_check.py "$CHECK_NAME" @@ -962,7 +922,6 @@ jobs: with: test_name: Unit tests (ubsan) runner_type: fuzzer-unit-tester - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 unit_tests_check.py "$CHECK_NAME" @@ -998,7 +957,6 @@ jobs: with: test_name: SQLancer (release) runner_type: fuzzer-unit-tester - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 sqlancer_check.py "$CHECK_NAME" @@ -1008,7 +966,6 @@ jobs: with: test_name: SQLancer (debug) runner_type: fuzzer-unit-tester - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 sqlancer_check.py "$CHECK_NAME" @@ -1099,7 +1056,6 @@ jobs: runner_type: func-tester additional_envs: | KILL_TIMEOUT=10800 - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 sqllogic_test.py "$CHECK_NAME" "$KILL_TIMEOUT" @@ -1112,7 +1068,6 @@ jobs: with: test_name: SQLTest runner_type: fuzzer-unit-tester - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 sqltest.py "$CHECK_NAME" diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index f4169f0ffef..b5771fa87ab 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -260,7 +260,6 @@ jobs: runner_type: func-tester additional_envs: | KILL_TIMEOUT=10800 - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" @@ -272,7 +271,6 @@ jobs: runner_type: func-tester-aarch64 additional_envs: | 
KILL_TIMEOUT=10800 - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" @@ -347,7 +345,6 @@ jobs: runner_type: func-tester additional_envs: | KILL_TIMEOUT=3600 - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" @@ -359,7 +356,6 @@ jobs: runner_type: func-tester-aarch64 additional_envs: | KILL_TIMEOUT=3600 - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" @@ -371,7 +367,6 @@ jobs: runner_type: func-tester additional_envs: | KILL_TIMEOUT=3600 - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" @@ -383,7 +378,6 @@ jobs: runner_type: func-tester additional_envs: | KILL_TIMEOUT=3600 - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" @@ -395,7 +389,6 @@ jobs: runner_type: func-tester additional_envs: | KILL_TIMEOUT=3600 - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" @@ -407,7 +400,6 @@ jobs: runner_type: func-tester additional_envs: | KILL_TIMEOUT=3600 - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" @@ -419,7 +411,6 @@ jobs: runner_type: func-tester additional_envs: | KILL_TIMEOUT=3600 - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT" @@ -432,7 +423,6 @@ jobs: with: test_name: Stress test (asan) runner_type: stress-tester - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 stress_check.py "$CHECK_NAME" @@ -442,7 +432,6 @@ jobs: with: test_name: Stress test (tsan) runner_type: stress-tester - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 stress_check.py "$CHECK_NAME" @@ -452,7 +441,6 @@ jobs: with: test_name: Stress test (msan) 
runner_type: stress-tester - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 stress_check.py "$CHECK_NAME" @@ -462,7 +450,6 @@ jobs: with: test_name: Stress test (ubsan) runner_type: stress-tester - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 stress_check.py "$CHECK_NAME" @@ -472,7 +459,6 @@ jobs: with: test_name: Stress test (debug) runner_type: stress-tester - batches: 1 run_command: | cd "$REPO_COPY/tests/ci" python3 stress_check.py "$CHECK_NAME" From d0e5a55fb51a22b34b37f889882ab6eb6cc36430 Mon Sep 17 00:00:00 2001 From: "Mikhail f. Shiryaev" Date: Mon, 13 Nov 2023 07:41:17 +0100 Subject: [PATCH 80/99] Fix condition for Clean actions, execute it always --- .github/workflows/reusable_build.yml | 1 + .github/workflows/reusable_test.yml | 1 + 2 files changed, 2 insertions(+) diff --git a/.github/workflows/reusable_build.yml b/.github/workflows/reusable_build.yml index 7b88c2b9925..57c7354c183 100644 --- a/.github/workflows/reusable_build.yml +++ b/.github/workflows/reusable_build.yml @@ -71,4 +71,5 @@ jobs: name: ${{ env.BUILD_URLS }} path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json - name: Clean + if: always() uses: ./.github/actions/clean diff --git a/.github/workflows/reusable_test.yml b/.github/workflows/reusable_test.yml index 04b737d2131..08c1e431f4b 100644 --- a/.github/workflows/reusable_test.yml +++ b/.github/workflows/reusable_test.yml @@ -97,4 +97,5 @@ jobs: - name: Run test run: ${{inputs.run_command}} - name: Clean + if: always() uses: ./.github/actions/clean From c0a3c025648535d75f5c97a60c6deb52f3df0643 Mon Sep 17 00:00:00 2001 From: "Mikhail f. 
Shiryaev" Date: Mon, 13 Nov 2023 09:43:09 +0100 Subject: [PATCH 81/99] Skip unnecessary PrepareStrategy jobs --- .github/workflows/reusable_test.yml | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/.github/workflows/reusable_test.yml b/.github/workflows/reusable_test.yml index 08c1e431f4b..3363576e90c 100644 --- a/.github/workflows/reusable_test.yml +++ b/.github/workflows/reusable_test.yml @@ -44,7 +44,9 @@ env: jobs: PrepareStrategy: - if: ${{inputs.batches > 0}} # batches < 1 is misconfiguration + # batches < 1 is misconfiguration, + # and we need this step only for batches > 1 + if: ${{ inputs.batches > 1 }} runs-on: [self-hosted, style-checker-aarch64] outputs: batches: ${{steps.batches.outputs.batches}} @@ -55,6 +57,11 @@ jobs: batches_output=$(python3 -c 'import json; print(json.dumps(list(range(${{inputs.batches}}))))') echo "batches=${batches_output}" >> "$GITHUB_OUTPUT" Test: + # If PrepareStrategy is skipped for batches == 1, + # we still need to launch the test. + # `! failure()` is mandatory here to launch on skipped Job + # `&& !cancelled()` to allow the be cancelable + if: ${{ ( !failure() && !cancelled() ) && inputs.batches > 0 }} # Do not add `-0` to the end, if there's only one batch name: ${{inputs.test_name}}${{ inputs.batches > 1 && format('-{0}',matrix.batch) || '' }} env: @@ -64,7 +71,10 @@ jobs: strategy: fail-fast: false # we always wait for entire matrix matrix: - batch: ${{ fromJson(needs.PrepareStrategy.outputs.batches) }} + # if PrepareStrategy does not have batches, we use 0 + batch: ${{ needs.PrepareStrategy.outputs.batches + && fromJson(needs.PrepareStrategy.outputs.batches) + || fromJson('[0]')}} steps: - name: Check out repository code uses: ClickHouse/checkout@v1 From f72c628fbc67f4ce409900844b18d4eb70433069 Mon Sep 17 00:00:00 2001 From: "Mikhail f. 
Shiryaev" Date: Mon, 13 Nov 2023 11:06:03 +0100 Subject: [PATCH 82/99] Add forgotten PYTHONUNBUFFERED global env to reusable workflows --- .github/workflows/reusable_build.yml | 4 ++++ .github/workflows/reusable_test.yml | 2 ++ 2 files changed, 6 insertions(+) diff --git a/.github/workflows/reusable_build.yml b/.github/workflows/reusable_build.yml index 57c7354c183..f36b93bea58 100644 --- a/.github/workflows/reusable_build.yml +++ b/.github/workflows/reusable_build.yml @@ -1,6 +1,10 @@ ### For the pure soul wishes to move it to another place # https://github.com/orgs/community/discussions/9050 +env: + # Force the stdout and stderr streams to be unbuffered + PYTHONUNBUFFERED: 1 + name: Build ClickHouse 'on': workflow_call: diff --git a/.github/workflows/reusable_test.yml b/.github/workflows/reusable_test.yml index 3363576e90c..e82d2d51596 100644 --- a/.github/workflows/reusable_test.yml +++ b/.github/workflows/reusable_test.yml @@ -40,6 +40,8 @@ name: Testing workflow required: false env: + # Force the stdout and stderr streams to be unbuffered + PYTHONUNBUFFERED: 1 CHECK_NAME: ${{inputs.test_name}} jobs: From c93efc929af894d776fd2ff92248acbcc869f106 Mon Sep 17 00:00:00 2001 From: Denny Crane Date: Mon, 13 Nov 2023 16:54:21 +0300 Subject: [PATCH 83/99] fix typo in the doc --- docs/en/sql-reference/dictionaries/index.md | 4 ++-- .../external-dictionaries/external-dicts-dict-sources.md | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/en/sql-reference/dictionaries/index.md b/docs/en/sql-reference/dictionaries/index.md index 7d8d04f80a0..4f021b25809 100644 --- a/docs/en/sql-reference/dictionaries/index.md +++ b/docs/en/sql-reference/dictionaries/index.md @@ -1769,7 +1769,7 @@ Example of settings: qwerty123 database_name table_name - 1 + 1 1 One "SomeColumn" = 42 @@ -1787,7 +1787,7 @@ Setting fields: - `password` – Password of the Cassandra user. - `keyspace` – Name of the keyspace (database). 
- `column_family` – Name of the column family (table). -- `allow_filering` – Flag to allow or not potentially expensive conditions on clustering key columns. Default value is 1. +- `allow_filtering` – Flag to allow or not potentially expensive conditions on clustering key columns. Default value is 1. - `partition_key_prefix` – Number of partition key columns in primary key of the Cassandra table. Required for compose key dictionaries. Order of key columns in the dictionary definition must be the same as in Cassandra. Default value is 1 (the first key column is a partition key and other key columns are clustering key). - `consistency` – Consistency level. Possible values: `One`, `Two`, `Three`, `All`, `EachQuorum`, `Quorum`, `LocalQuorum`, `LocalOne`, `Serial`, `LocalSerial`. Default value is `One`. - `where` – Optional selection criteria. diff --git a/docs/ru/sql-reference/dictionaries/external-dictionaries/external-dicts-dict-sources.md b/docs/ru/sql-reference/dictionaries/external-dictionaries/external-dicts-dict-sources.md index 5bd0d9ed206..9bd4c48c6a0 100644 --- a/docs/ru/sql-reference/dictionaries/external-dictionaries/external-dicts-dict-sources.md +++ b/docs/ru/sql-reference/dictionaries/external-dictionaries/external-dicts-dict-sources.md @@ -723,7 +723,7 @@ SOURCE(REDIS( qwerty123 database_name table_name - 1 + 1 1 One "SomeColumn" = 42 @@ -741,7 +741,7 @@ SOURCE(REDIS( - `password` – пароль для соединения с Cassandra. - `keyspace` – имя keyspace (база данных). - `column_family` – имя семейства столбцов (таблица). -- `allow_filering` – флаг, разрешающий или не разрешающий потенциально дорогостоящие условия на кластеризации ключевых столбцов. Значение по умолчанию: 1. +- `allow_filtering` – флаг, разрешающий или не разрешающий потенциально дорогостоящие условия на кластеризации ключевых столбцов. Значение по умолчанию: 1. - `partition_key_prefix` – количество партиций ключевых столбцов в первичном ключе таблицы Cassandra. 
Необходимо для составления ключей словаря. Порядок ключевых столбцов в определении словаря должен быть таким же, как в Cassandra. Значение по умолчанию: 1 (первый ключевой столбец - это ключ партицирования, остальные ключевые столбцы - ключи кластеризации). From 951d9ac1446d990f6da476217eeb5330ad5b8137 Mon Sep 17 00:00:00 2001 From: SmitaRKulkarni Date: Mon, 13 Nov 2023 15:44:06 +0100 Subject: [PATCH 84/99] Disable randomization of allow_experimental_block_number_column flag (#56474) Co-authored-by: Alexander Tokmakov --- docker/test/stateless/stress_tests.lib | 2 -- tests/config/config.d/merge_tree_settings.xml | 2 -- 2 files changed, 4 deletions(-) diff --git a/docker/test/stateless/stress_tests.lib b/docker/test/stateless/stress_tests.lib index 2309e307324..fc35bff5e40 100644 --- a/docker/test/stateless/stress_tests.lib +++ b/docker/test/stateless/stress_tests.lib @@ -74,8 +74,6 @@ function configure() randomize_config_boolean_value use_compression zookeeper - randomize_config_boolean_value allow_experimental_block_number_column merge_tree_settings - # for clickhouse-server (via service) echo "ASAN_OPTIONS='malloc_context_size=10 verbosity=1 allocator_release_to_os_interval_ms=10000'" >> /etc/environment # for clickhouse-client diff --git a/tests/config/config.d/merge_tree_settings.xml b/tests/config/config.d/merge_tree_settings.xml index 7dcea114b90..f277c18fa3f 100644 --- a/tests/config/config.d/merge_tree_settings.xml +++ b/tests/config/config.d/merge_tree_settings.xml @@ -2,7 +2,5 @@ 10 - - 0 From 993f7529001db2872394da5b2b36e8e45fc69d64 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ra=C3=BAl=20Mar=C3=ADn?= Date: Mon, 13 Nov 2023 12:53:09 +0000 Subject: [PATCH 85/99] Revert "Merge pull request #56543 from rschu1ze/bump-grpc" This reverts commit 213c7cffb5eae1951f67b8531ec69262696c7e3d, reversing changes made to 9ed47749dee12e900875ff4c6214a177fa07a94c. 
--- contrib/grpc | 2 +- contrib/sparse-checkout/update-grpc.sh | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/contrib/grpc b/contrib/grpc index b723ecae099..267af8c3a1e 160000 --- a/contrib/grpc +++ b/contrib/grpc @@ -1 +1 @@ -Subproject commit b723ecae0991bb873fe87a595dfb187178733fde +Subproject commit 267af8c3a1ea4a5a4d9e5a070ad2d1ac7c701923 diff --git a/contrib/sparse-checkout/update-grpc.sh b/contrib/sparse-checkout/update-grpc.sh index 21628ce8dd1..4571bd6307d 100755 --- a/contrib/sparse-checkout/update-grpc.sh +++ b/contrib/sparse-checkout/update-grpc.sh @@ -7,7 +7,6 @@ echo '/*' > $FILES_TO_CHECKOUT echo '!/test/*' >> $FILES_TO_CHECKOUT echo '/test/build/*' >> $FILES_TO_CHECKOUT echo '/test/core/tsi/alts/fake_handshaker/*' >> $FILES_TO_CHECKOUT -echo '/test/core/event_engine/fuzzing_event_engine/*' >> $FILES_TO_CHECKOUT echo '!/tools/*' >> $FILES_TO_CHECKOUT echo '/tools/codegen/*' >> $FILES_TO_CHECKOUT echo '!/examples/*' >> $FILES_TO_CHECKOUT From 51e8c58a38687698eeb63af7bf18f48c9f1ed854 Mon Sep 17 00:00:00 2001 From: Robert Schulze Date: Mon, 13 Nov 2023 14:12:16 +0000 Subject: [PATCH 86/99] Revert "Revert "Merge pull request #56543 from rschu1ze/bump-grpc"" This reverts commit d4840adfd4708729ed8f969e61116f58ffe2fb18. 
--- contrib/grpc | 2 +- contrib/sparse-checkout/update-grpc.sh | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/contrib/grpc b/contrib/grpc index 267af8c3a1e..b723ecae099 160000 --- a/contrib/grpc +++ b/contrib/grpc @@ -1 +1 @@ -Subproject commit 267af8c3a1ea4a5a4d9e5a070ad2d1ac7c701923 +Subproject commit b723ecae0991bb873fe87a595dfb187178733fde diff --git a/contrib/sparse-checkout/update-grpc.sh b/contrib/sparse-checkout/update-grpc.sh index 4571bd6307d..21628ce8dd1 100755 --- a/contrib/sparse-checkout/update-grpc.sh +++ b/contrib/sparse-checkout/update-grpc.sh @@ -7,6 +7,7 @@ echo '/*' > $FILES_TO_CHECKOUT echo '!/test/*' >> $FILES_TO_CHECKOUT echo '/test/build/*' >> $FILES_TO_CHECKOUT echo '/test/core/tsi/alts/fake_handshaker/*' >> $FILES_TO_CHECKOUT +echo '/test/core/event_engine/fuzzing_event_engine/*' >> $FILES_TO_CHECKOUT echo '!/tools/*' >> $FILES_TO_CHECKOUT echo '/tools/codegen/*' >> $FILES_TO_CHECKOUT echo '!/examples/*' >> $FILES_TO_CHECKOUT From ed7bfeff6999e69e749acacd1d9e20e2828ccb12 Mon Sep 17 00:00:00 2001 From: Robert Schulze Date: Mon, 13 Nov 2023 14:14:13 +0000 Subject: [PATCH 87/99] Update gRPC to not include systemd --- contrib/grpc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/contrib/grpc b/contrib/grpc index b723ecae099..6e5e645de7c 160000 --- a/contrib/grpc +++ b/contrib/grpc @@ -1 +1 @@ -Subproject commit b723ecae0991bb873fe87a595dfb187178733fde +Subproject commit 6e5e645de7cb0604e3ad4ba55abff2eca38c1803 From 73d058967a94cab6ec52152bea9c7fe50bc0d2ef Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ra=C3=BAl=20Mar=C3=ADn?= Date: Mon, 13 Nov 2023 12:07:43 +0000 Subject: [PATCH 88/99] Add back flaky tests to analyzer_tech_debt.txt --- tests/analyzer_tech_debt.txt | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tests/analyzer_tech_debt.txt b/tests/analyzer_tech_debt.txt index 6021e7ccf38..8cb921bdd19 100644 --- a/tests/analyzer_tech_debt.txt +++ b/tests/analyzer_tech_debt.txt @@ -11,6 +11,7 @@ 
01244_optimize_distributed_group_by_sharding_key 01268_mv_scalars 01268_shard_avgweighted +01287_max_execution_speed 01455_shard_leaf_max_rows_bytes_to_read 01495_subqueries_in_with_statement 01560_merge_distributed_join @@ -42,6 +43,8 @@ 00917_multiple_joins_denny_crane 00636_partition_key_parts_pruning 01825_type_json_multiple_files +02003_WithMergeableStateAfterAggregationAndLimit_LIMIT_BY_LIMIT_OFFSET +02404_memory_bound_merging 02725_agg_projection_resprect_PK 02721_url_cluster 02534_s3_cluster_insert_select_schema_inference From 2ad98a58c9a744ccd43651ed1c14bcf4f07182b2 Mon Sep 17 00:00:00 2001 From: Alexey Milovidov Date: Mon, 13 Nov 2023 16:51:01 +0100 Subject: [PATCH 89/99] Better caching of arrow build (#56657) * Better caching of Arrow build * Fix error * Remove useless files --- contrib/arrow-cmake/CMakeLists.txt | 9 ++- .../arrow-cmake/cpp/src/arrow/util/config.h | 61 +++++++++++++++++++ .../cpp/src/orc/c++/include/orc/orc-config.hh | 38 ++++++++++++ src/Processors/examples/CMakeLists.txt | 6 -- src/Processors/examples/native_orc.cpp | 36 ----------- .../examples/processors_test_aggregation.cpp | 0 ...rocessors_test_merge_sorting_transform.cpp | 0 7 files changed, 103 insertions(+), 47 deletions(-) create mode 100644 contrib/arrow-cmake/cpp/src/arrow/util/config.h create mode 100644 contrib/arrow-cmake/cpp/src/orc/c++/include/orc/orc-config.hh delete mode 100644 src/Processors/examples/native_orc.cpp delete mode 100644 src/Processors/examples/processors_test_aggregation.cpp delete mode 100644 src/Processors/examples/processors_test_merge_sorting_transform.cpp diff --git a/contrib/arrow-cmake/CMakeLists.txt b/contrib/arrow-cmake/CMakeLists.txt index c45d75bb3f2..71133451889 100644 --- a/contrib/arrow-cmake/CMakeLists.txt +++ b/contrib/arrow-cmake/CMakeLists.txt @@ -109,7 +109,6 @@ set (ORC_CXX_HAS_CSTDINT 1) set (ORC_CXX_HAS_THREAD_LOCAL 1) include(orc_check.cmake) -configure_file("${ORC_INCLUDE_DIR}/orc/orc-config.hh.in" 
"${ORC_BUILD_INCLUDE_DIR}/orc/orc-config.hh") configure_file("${ORC_SOURCE_SRC_DIR}/Adaptor.hh.in" "${ORC_BUILD_INCLUDE_DIR}/Adaptor.hh") @@ -198,7 +197,9 @@ target_link_libraries(_orc PRIVATE ch_contrib::snappy ch_contrib::zlib ch_contrib::zstd) -target_include_directories(_orc SYSTEM BEFORE PUBLIC ${ORC_INCLUDE_DIR}) +target_include_directories(_orc SYSTEM BEFORE PUBLIC + ${ORC_INCLUDE_DIR} + "${ClickHouse_SOURCE_DIR}/contrib/arrow-cmake/cpp/src/orc/c++/include") target_include_directories(_orc SYSTEM BEFORE PUBLIC ${ORC_BUILD_INCLUDE_DIR}) target_include_directories(_orc SYSTEM PRIVATE ${ORC_SOURCE_SRC_DIR} @@ -212,8 +213,6 @@ target_include_directories(_orc SYSTEM PRIVATE set(LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/arrow/cpp/src/arrow") -configure_file("${LIBRARY_DIR}/util/config.h.cmake" "${CMAKE_CURRENT_BINARY_DIR}/cpp/src/arrow/util/config.h") - # arrow/cpp/src/arrow/CMakeLists.txt (ARROW_SRCS + ARROW_COMPUTE + ARROW_IPC) set(ARROW_SRCS "${LIBRARY_DIR}/array/array_base.cc" @@ -450,7 +449,7 @@ target_link_libraries(_arrow PUBLIC _orc) add_dependencies(_arrow protoc) target_include_directories(_arrow SYSTEM BEFORE PUBLIC ${ARROW_SRC_DIR}) -target_include_directories(_arrow SYSTEM BEFORE PUBLIC "${CMAKE_CURRENT_BINARY_DIR}/cpp/src") +target_include_directories(_arrow SYSTEM BEFORE PUBLIC "${ClickHouse_SOURCE_DIR}/contrib/arrow-cmake/cpp/src") target_include_directories(_arrow SYSTEM PRIVATE ${ARROW_SRC_DIR}) target_include_directories(_arrow SYSTEM PRIVATE ${HDFS_INCLUDE_DIR}) diff --git a/contrib/arrow-cmake/cpp/src/arrow/util/config.h b/contrib/arrow-cmake/cpp/src/arrow/util/config.h new file mode 100644 index 00000000000..cacff7b16cb --- /dev/null +++ b/contrib/arrow-cmake/cpp/src/arrow/util/config.h @@ -0,0 +1,61 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. 
The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +#define ARROW_VERSION_MAJOR 11 +#define ARROW_VERSION_MINOR 0 +#define ARROW_VERSION_PATCH 0 +#define ARROW_VERSION ((ARROW_VERSION_MAJOR * 1000) + ARROW_VERSION_MINOR) * 1000 + ARROW_VERSION_PATCH + +#define ARROW_VERSION_STRING "11.0.0" + +#define ARROW_SO_VERSION "1100" +#define ARROW_FULL_SO_VERSION "1100.0.0" + +#define ARROW_CXX_COMPILER_ID "Clang" +#define ARROW_CXX_COMPILER_VERSION "ClickHouse" +#define ARROW_CXX_COMPILER_FLAGS "" + +#define ARROW_BUILD_TYPE "" + +#define ARROW_GIT_ID "" +#define ARROW_GIT_DESCRIPTION "" + +#define ARROW_PACKAGE_KIND "" + +/* #undef ARROW_COMPUTE */ +/* #undef ARROW_CSV */ +/* #undef ARROW_CUDA */ +/* #undef ARROW_DATASET */ +/* #undef ARROW_FILESYSTEM */ +/* #undef ARROW_FLIGHT */ +/* #undef ARROW_FLIGHT_SQL */ +/* #undef ARROW_IPC */ +/* #undef ARROW_JEMALLOC */ +/* #undef ARROW_JEMALLOC_VENDORED */ +/* #undef ARROW_JSON */ +/* #undef ARROW_ORC */ +/* #undef ARROW_PARQUET */ +/* #undef ARROW_SUBSTRAIT */ + +/* #undef ARROW_GCS */ +/* #undef ARROW_S3 */ +/* #undef ARROW_USE_NATIVE_INT128 */ +/* #undef ARROW_WITH_MUSL */ +/* #undef ARROW_WITH_OPENTELEMETRY */ +/* #undef ARROW_WITH_UCX */ + +/* #undef GRPCPP_PP_INCLUDE */ diff --git a/contrib/arrow-cmake/cpp/src/orc/c++/include/orc/orc-config.hh b/contrib/arrow-cmake/cpp/src/orc/c++/include/orc/orc-config.hh new file mode 100644 index 00000000000..1b0f71ddd40 --- /dev/null +++ 
b/contrib/arrow-cmake/cpp/src/orc/c++/include/orc/orc-config.hh @@ -0,0 +1,38 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ORC_CONFIG_HH +#define ORC_CONFIG_HH + +#define ORC_VERSION "" + +#define ORC_CXX_HAS_CSTDINT + +#ifdef ORC_CXX_HAS_CSTDINT + #include +#else + #include +#endif + +// Following MACROS should be keeped for backward compatibility. 
+#define ORC_NOEXCEPT noexcept +#define ORC_NULLPTR nullptr +#define ORC_OVERRIDE override +#define ORC_UNIQUE_PTR std::unique_ptr + +#endif diff --git a/src/Processors/examples/CMakeLists.txt b/src/Processors/examples/CMakeLists.txt index 0c8734aee3c..5d43a0d7d08 100644 --- a/src/Processors/examples/CMakeLists.txt +++ b/src/Processors/examples/CMakeLists.txt @@ -2,9 +2,3 @@ if (TARGET ch_contrib::hivemetastore) clickhouse_add_executable (comma_separated_streams comma_separated_streams.cpp) target_link_libraries (comma_separated_streams PRIVATE dbms) endif() - -if (USE_ORC) - clickhouse_add_executable (native_orc native_orc.cpp) - target_link_libraries (native_orc PRIVATE dbms) - target_include_directories (native_orc PRIVATE ${ClickHouse_SOURCE_DIR}/contrib/orc/c++/include) -endif () diff --git a/src/Processors/examples/native_orc.cpp b/src/Processors/examples/native_orc.cpp deleted file mode 100644 index 201e87b1f56..00000000000 --- a/src/Processors/examples/native_orc.cpp +++ /dev/null @@ -1,36 +0,0 @@ -#include -#include -#include -#include - -using namespace DB; - -int main() -{ - /// Read schema from orc file - String path = "/path/to/orc/file"; - // String path = "/data1/clickhouse_official/data/user_files/bigolive_audience_stats_orc.orc"; - { - ReadBufferFromFile in(path); - NativeORCSchemaReader schema_reader(in, {}); - auto schema = schema_reader.readSchema(); - std::cout << "schema:" << schema.toString() << std::endl; - } - - /// Read schema from string with orc data - { - ReadBufferFromFile in(path); - - String content; - WriteBufferFromString out(content); - - copyData(in, out); - - content.resize(out.count()); - ReadBufferFromString in2(content); - NativeORCSchemaReader schema_reader(in2, {}); - auto schema = schema_reader.readSchema(); - std::cout << "schema:" << schema.toString() << std::endl; - } - return 0; -} diff --git a/src/Processors/examples/processors_test_aggregation.cpp b/src/Processors/examples/processors_test_aggregation.cpp deleted file 
mode 100644 index e69de29bb2d..00000000000 diff --git a/src/Processors/examples/processors_test_merge_sorting_transform.cpp b/src/Processors/examples/processors_test_merge_sorting_transform.cpp deleted file mode 100644 index e69de29bb2d..00000000000 From 38d4d669981f8760b0aa1ddb1d2172777db81e3f Mon Sep 17 00:00:00 2001 From: "Mikhail f. Shiryaev" Date: Mon, 13 Nov 2023 18:43:42 +0100 Subject: [PATCH 90/99] Fix missing argument for style_check.py in master workflow --- .github/workflows/master.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml index 260fc2fc7d5..e662a5b6f98 100644 --- a/.github/workflows/master.yml +++ b/.github/workflows/master.yml @@ -96,7 +96,7 @@ jobs: runner_type: style-checker run_command: | cd "$REPO_COPY/tests/ci" - python3 style_check.py + python3 style_check.py --no-push CompatibilityCheckX86: needs: [BuilderDebRelease] uses: ./.github/workflows/reusable_test.yml From d92e3329234005483a5f6f77baf7c03afa2b96bb Mon Sep 17 00:00:00 2001 From: Alexey Milovidov Date: Mon, 13 Nov 2023 19:33:56 +0100 Subject: [PATCH 91/99] Fix build --- CMakeLists.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 9c8952aea96..9e548c5a6d0 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -554,7 +554,7 @@ if (ENABLE_RUST) endif() endif() -if (CMAKE_BUILD_TYPE_UC STREQUAL "RELWITHDEBINFO") +if (CMAKE_BUILD_TYPE_UC STREQUAL "RELWITHDEBINFO" AND NOT SANITIZE AND OS_LINUX AND (ARCH_AMD64 OR ARCH_AARCH64)) set(CHECK_LARGE_OBJECT_SIZES_DEFAULT ON) else () set(CHECK_LARGE_OBJECT_SIZES_DEFAULT OFF) From 0622379ec7079174bc0f1f34c27eff9ea5cb0721 Mon Sep 17 00:00:00 2001 From: Alexander Tokmakov Date: Mon, 13 Nov 2023 21:11:07 +0100 Subject: [PATCH 92/99] Revert "Add a setting max_execution_time_leaf to limit the execution time on shard for distributed query" --- .../operations/settings/query-complexity.md | 22 +------------------ 
src/Core/Settings.h | 16 ++++++-------- .../ClusterProxy/SelectStreamFactory.cpp | 13 +++++------ .../ClusterProxy/executeQuery.cpp | 8 ------- .../02786_max_execution_time_leaf.reference | 0 .../02786_max_execution_time_leaf.sql | 4 ---- 6 files changed, 13 insertions(+), 50 deletions(-) delete mode 100644 tests/queries/0_stateless/02786_max_execution_time_leaf.reference delete mode 100644 tests/queries/0_stateless/02786_max_execution_time_leaf.sql diff --git a/docs/en/operations/settings/query-complexity.md b/docs/en/operations/settings/query-complexity.md index 9e36aa26946..2211b0c2de2 100644 --- a/docs/en/operations/settings/query-complexity.md +++ b/docs/en/operations/settings/query-complexity.md @@ -172,27 +172,7 @@ If you set `timeout_before_checking_execution_speed `to 0, ClickHouse will use c ## timeout_overflow_mode {#timeout-overflow-mode} -What to do if the query is run longer than `max_execution_time`: `throw` or `break`. By default, `throw`. - -# max_execution_time_leaf - -Similar semantic to `max_execution_time` but only apply on leaf node for distributed or remote queries. - -For example, if we want to limit execution time on leaf node to `10s` but no limit on the initial node, instead of having `max_execution_time` in the nested subquery settings: - -``` sql -SELECT count() FROM cluster(cluster, view(SELECT * FROM t SETTINGS max_execution_time = 10)); -``` - -We can use `max_execution_time_leaf` as the query settings: - -``` sql -SELECT count() FROM cluster(cluster, view(SELECT * FROM t)) SETTINGS max_execution_time_leaf = 10; -``` - -# timeout_overflow_mode_leaf - -What to do when the query in leaf node run longer than `max_execution_time_leaf`: `throw` or `break`. By default, `throw`. +What to do if the query is run longer than ‘max_execution_time’: ‘throw’ or ‘break’. By default, throw. 
## min_execution_speed {#min-execution-speed} diff --git a/src/Core/Settings.h b/src/Core/Settings.h index 0c9c1d21852..0a26e4ef5f3 100644 --- a/src/Core/Settings.h +++ b/src/Core/Settings.h @@ -364,16 +364,16 @@ class IColumn; M(UInt64, max_bytes_to_read, 0, "Limit on read bytes (after decompression) from the most 'deep' sources. That is, only in the deepest subquery. When reading from a remote server, it is only checked on a remote server.", 0) \ M(OverflowMode, read_overflow_mode, OverflowMode::THROW, "What to do when the limit is exceeded.", 0) \ \ - M(UInt64, max_rows_to_read_leaf, 0, "Limit on read rows on the leaf nodes for distributed queries. Limit is applied for local reads only, excluding the final merge stage on the root node. Note, the setting is unstable with prefer_localhost_replica=1.", 0) \ - M(UInt64, max_bytes_to_read_leaf, 0, "Limit on read bytes (after decompression) on the leaf nodes for distributed queries. Limit is applied for local reads only, excluding the final merge stage on the root node. Note, the setting is unstable with prefer_localhost_replica=1.", 0) \ + M(UInt64, max_rows_to_read_leaf, 0, "Limit on read rows on the leaf nodes for distributed queries. Limit is applied for local reads only excluding the final merge stage on the root node. Note, the setting is unstable with prefer_localhost_replica=1.", 0) \ + M(UInt64, max_bytes_to_read_leaf, 0, "Limit on read bytes (after decompression) on the leaf nodes for distributed queries. Limit is applied for local reads only excluding the final merge stage on the root node. 
Note, the setting is unstable with prefer_localhost_replica=1.", 0) \ M(OverflowMode, read_overflow_mode_leaf, OverflowMode::THROW, "What to do when the leaf limit is exceeded.", 0) \ \ - M(UInt64, max_rows_to_group_by, 0, "If aggregation during GROUP BY is generating more than the specified number of rows (unique GROUP BY keys), the behavior will be determined by the 'group_by_overflow_mode' which by default is - throw an exception, but can be also switched to an approximate GROUP BY mode.", 0) \ + M(UInt64, max_rows_to_group_by, 0, "If aggregation during GROUP BY is generating more than specified number of rows (unique GROUP BY keys), the behavior will be determined by the 'group_by_overflow_mode' which by default is - throw an exception, but can be also switched to an approximate GROUP BY mode.", 0) \ M(OverflowModeGroupBy, group_by_overflow_mode, OverflowMode::THROW, "What to do when the limit is exceeded.", 0) \ M(UInt64, max_bytes_before_external_group_by, 0, "If memory usage during GROUP BY operation is exceeding this threshold in bytes, activate the 'external aggregation' mode (spill data to disk). 
Recommended value is half of available system memory.", 0) \ \ - M(UInt64, max_rows_to_sort, 0, "If more than the specified amount of records have to be processed for ORDER BY operation, the behavior will be determined by the 'sort_overflow_mode' which by default is - throw an exception", 0) \ - M(UInt64, max_bytes_to_sort, 0, "If more than the specified amount of (uncompressed) bytes have to be processed for ORDER BY operation, the behavior will be determined by the 'sort_overflow_mode' which by default is - throw an exception", 0) \ + M(UInt64, max_rows_to_sort, 0, "If more than specified amount of records have to be processed for ORDER BY operation, the behavior will be determined by the 'sort_overflow_mode' which by default is - throw an exception", 0) \ + M(UInt64, max_bytes_to_sort, 0, "If more than specified amount of (uncompressed) bytes have to be processed for ORDER BY operation, the behavior will be determined by the 'sort_overflow_mode' which by default is - throw an exception", 0) \ M(OverflowMode, sort_overflow_mode, OverflowMode::THROW, "What to do when the limit is exceeded.", 0) \ M(UInt64, max_bytes_before_external_sort, 0, "If memory usage during ORDER BY operation is exceeding this threshold in bytes, activate the 'external sorting' mode (spill data to disk). Recommended value is half of available system memory.", 0) \ M(UInt64, max_bytes_before_remerge_sort, 1000000000, "In case of ORDER BY with LIMIT, when memory usage is higher than specified threshold, perform additional steps of merging blocks before final merge to keep just top LIMIT rows.", 0) \ @@ -384,10 +384,8 @@ class IColumn; M(OverflowMode, result_overflow_mode, OverflowMode::THROW, "What to do when the limit is exceeded.", 0) \ \ /* TODO: Check also when merging and finalizing aggregate functions. 
*/ \ - M(Seconds, max_execution_time, 0, "If query runtime exceeds the specified number of seconds, the behavior will be determined by the 'timeout_overflow_mode', which by default is - throw an exception. Note that the timeout is checked and query can stop only in designated places during data processing. It currently cannot stop during merging of aggregation states or during query analysis, and the actual run time will be higher than the value of this setting.", 0) \ + M(Seconds, max_execution_time, 0, "If query run time exceeded the specified number of seconds, the behavior will be determined by the 'timeout_overflow_mode' which by default is - throw an exception. Note that the timeout is checked and query can stop only in designated places during data processing. It currently cannot stop during merging of aggregation states or during query analysis, and the actual run time will be higher than the value of this setting.", 0) \ M(OverflowMode, timeout_overflow_mode, OverflowMode::THROW, "What to do when the limit is exceeded.", 0) \ - M(Seconds, max_execution_time_leaf, 0, "Similar semantic to max_execution_time but only apply on leaf node for distributed queries, the time out behavior will be determined by 'timeout_overflow_mode_leaf' which by default is - throw an exception", 0) \ - M(OverflowMode, timeout_overflow_mode_leaf, OverflowMode::THROW, "What to do when the leaf limit is exceeded.", 0) \ \ M(UInt64, min_execution_speed, 0, "Minimum number of execution rows per second.", 0) \ M(UInt64, max_execution_speed, 0, "Maximum number of execution rows per second.", 0) \ @@ -401,7 +399,7 @@ class IColumn; \ M(UInt64, max_sessions_for_user, 0, "Maximum number of simultaneous sessions for a user.", 0) \ \ - M(UInt64, max_subquery_depth, 100, "If a query has more than the specified number of nested subqueries, throw an exception. 
This allows you to have a sanity check to protect the users of your cluster from going insane with their queries.", 0) \ + M(UInt64, max_subquery_depth, 100, "If a query has more than specified number of nested subqueries, throw an exception. This allows you to have a sanity check to protect the users of your cluster from going insane with their queries.", 0) \ M(UInt64, max_analyze_depth, 5000, "Maximum number of analyses performed by interpreter.", 0) \ M(UInt64, max_ast_depth, 1000, "Maximum depth of query syntax tree. Checked after parsing.", 0) \ M(UInt64, max_ast_elements, 50000, "Maximum size of query syntax tree in number of nodes. Checked after parsing.", 0) \ diff --git a/src/Interpreters/ClusterProxy/SelectStreamFactory.cpp b/src/Interpreters/ClusterProxy/SelectStreamFactory.cpp index 4edc9d4d4e5..3935028f27c 100644 --- a/src/Interpreters/ClusterProxy/SelectStreamFactory.cpp +++ b/src/Interpreters/ClusterProxy/SelectStreamFactory.cpp @@ -1,21 +1,20 @@ +#include +#include #include #include #include #include #include -#include -#include #include #include -#include -#include #include #include #include #include + #include -#include -#include +#include +#include #include #include #include @@ -23,7 +22,6 @@ #include #include - namespace ProfileEvents { extern const Event DistributedConnectionMissingTable; @@ -123,7 +121,6 @@ void SelectStreamFactory::createForShard( if (it != objects_by_shard.end()) replaceMissedSubcolumnsByConstants(storage_snapshot->object_columns, it->second, query_ast); - auto emplace_local_stream = [&]() { local_plans.emplace_back(createLocalPlan( diff --git a/src/Interpreters/ClusterProxy/executeQuery.cpp b/src/Interpreters/ClusterProxy/executeQuery.cpp index 420bb447027..41235d107cd 100644 --- a/src/Interpreters/ClusterProxy/executeQuery.cpp +++ b/src/Interpreters/ClusterProxy/executeQuery.cpp @@ -141,14 +141,6 @@ ContextMutablePtr updateSettingsForCluster(const Cluster & cluster, 
new_settings.allow_experimental_parallel_reading_from_replicas = false; } - if (settings.max_execution_time_leaf.value > 0) - { - /// Replace 'max_execution_time' of this sub-query with 'max_execution_time_leaf' and 'timeout_overflow_mode' - /// with 'timeout_overflow_mode_leaf' - new_settings.max_execution_time = settings.max_execution_time_leaf; - new_settings.timeout_overflow_mode = settings.timeout_overflow_mode_leaf; - } - auto new_context = Context::createCopy(context); new_context->setSettings(new_settings); return new_context; diff --git a/tests/queries/0_stateless/02786_max_execution_time_leaf.reference b/tests/queries/0_stateless/02786_max_execution_time_leaf.reference deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/tests/queries/0_stateless/02786_max_execution_time_leaf.sql b/tests/queries/0_stateless/02786_max_execution_time_leaf.sql deleted file mode 100644 index 1d02e82569c..00000000000 --- a/tests/queries/0_stateless/02786_max_execution_time_leaf.sql +++ /dev/null @@ -1,4 +0,0 @@ --- Tags: no-fasttest -SELECT count() FROM cluster('test_cluster_two_shards', view( SELECT * FROM numbers(100000000000) )) SETTINGS max_execution_time_leaf = 1; -- { serverError 159 } --- Can return partial result -SELECT count() FROM cluster('test_cluster_two_shards', view( SELECT * FROM numbers(100000000000) )) FORMAT Null SETTINGS max_execution_time_leaf = 1, timeout_overflow_mode_leaf = 'break'; From 207c4c50cb3ec790aa168113adec9212674f922b Mon Sep 17 00:00:00 2001 From: Ethan Shea <142333519+ethshea@users.noreply.github.com> Date: Mon, 13 Nov 2023 16:12:05 -0500 Subject: [PATCH 93/99] Improve legibility of window functions list Before, markdown was rendering these as one big paragraph. 
--- docs/en/sql-reference/window-functions/index.md | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/docs/en/sql-reference/window-functions/index.md b/docs/en/sql-reference/window-functions/index.md index a8f494a5afc..7a18335fbdb 100644 --- a/docs/en/sql-reference/window-functions/index.md +++ b/docs/en/sql-reference/window-functions/index.md @@ -86,14 +86,14 @@ WINDOW window_name as ([[PARTITION BY grouping_column] [ORDER BY sorting_column] These functions can be used only as a window function. -`row_number()` - Number the current row within its partition starting from 1. -`first_value(x)` - Return the first non-NULL value evaluated within its ordered frame. -`last_value(x)` - Return the last non-NULL value evaluated within its ordered frame. -`nth_value(x, offset)` - Return the first non-NULL value evaluated against the nth row (offset) in its ordered frame. -`rank()` - Rank the current row within its partition with gaps. -`dense_rank()` - Rank the current row within its partition without gaps. -`lagInFrame(x)` - Return a value evaluated at the row that is at a specified physical offset row before the current row within the ordered frame. -`leadInFrame(x)` - Return a value evaluated at the row that is offset rows after the current row within the ordered frame. +- `row_number()` - Number the current row within its partition starting from 1. +- `first_value(x)` - Return the first non-NULL value evaluated within its ordered frame. +- `last_value(x)` - Return the last non-NULL value evaluated within its ordered frame. +- `nth_value(x, offset)` - Return the first non-NULL value evaluated against the nth row (offset) in its ordered frame. +- `rank()` - Rank the current row within its partition with gaps. +- `dense_rank()` - Rank the current row within its partition without gaps. +- `lagInFrame(x)` - Return a value evaluated at the row that is at a specified physical offset row before the current row within the ordered frame. 
+- `leadInFrame(x)` - Return a value evaluated at the row that is offset rows after the current row within the ordered frame. ```text PARTITION From 37dc535d780ed2f515b3a904a14b44555d232435 Mon Sep 17 00:00:00 2001 From: Ethan Shea <142333519+ethshea@users.noreply.github.com> Date: Mon, 13 Nov 2023 16:16:07 -0500 Subject: [PATCH 94/99] Remove tabs --- docs/en/sql-reference/window-functions/index.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/en/sql-reference/window-functions/index.md b/docs/en/sql-reference/window-functions/index.md index 7a18335fbdb..6340c369bff 100644 --- a/docs/en/sql-reference/window-functions/index.md +++ b/docs/en/sql-reference/window-functions/index.md @@ -86,12 +86,12 @@ WINDOW window_name as ([[PARTITION BY grouping_column] [ORDER BY sorting_column] These functions can be used only as a window function. -- `row_number()` - Number the current row within its partition starting from 1. -- `first_value(x)` - Return the first non-NULL value evaluated within its ordered frame. +- `row_number()` - Number the current row within its partition starting from 1. +- `first_value(x)` - Return the first non-NULL value evaluated within its ordered frame. - `last_value(x)` - Return the last non-NULL value evaluated within its ordered frame. - `nth_value(x, offset)` - Return the first non-NULL value evaluated against the nth row (offset) in its ordered frame. -- `rank()` - Rank the current row within its partition with gaps. -- `dense_rank()` - Rank the current row within its partition without gaps. +- `rank()` - Rank the current row within its partition with gaps. +- `dense_rank()` - Rank the current row within its partition without gaps. - `lagInFrame(x)` - Return a value evaluated at the row that is at a specified physical offset row before the current row within the ordered frame. - `leadInFrame(x)` - Return a value evaluated at the row that is offset rows after the current row within the ordered frame. 
From 8c9f12fe17ecdf3b4449a8a2bd699cf0cea4852d Mon Sep 17 00:00:00 2001 From: Alexey Milovidov Date: Tue, 14 Nov 2023 08:05:33 +0300 Subject: [PATCH 95/99] Update wide_integer_impl.h --- base/base/wide_integer_impl.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/base/base/wide_integer_impl.h b/base/base/wide_integer_impl.h index 8d1da7de642..c1fd7b69b7f 100644 --- a/base/base/wide_integer_impl.h +++ b/base/base/wide_integer_impl.h @@ -104,7 +104,7 @@ public: static constexpr wide::integer min() noexcept { - if (is_same_v) + if constexpr (is_same_v) { using T = wide::integer; T res{}; From 32dd1b26b305a323144f77f3d6081746f4c28578 Mon Sep 17 00:00:00 2001 From: Alexey Milovidov Date: Tue, 14 Nov 2023 06:26:05 +0100 Subject: [PATCH 96/99] Make autogenerated file as light as possible --- src/Daemon/BaseDaemon.cpp | 2 +- src/Daemon/GitHash.cpp.in | 4 +--- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/src/Daemon/BaseDaemon.cpp b/src/Daemon/BaseDaemon.cpp index 8833156386f..7d19b935c2d 100644 --- a/src/Daemon/BaseDaemon.cpp +++ b/src/Daemon/BaseDaemon.cpp @@ -967,7 +967,7 @@ static void blockSignals(const std::vector & signals) throw Poco::Exception("Cannot block signal."); } -extern String getGitHash(); +extern const char * getGitHash(); void BaseDaemon::initializeTerminationAndSignalProcessing() { diff --git a/src/Daemon/GitHash.cpp.in b/src/Daemon/GitHash.cpp.in index 7e33682d670..65937af8848 100644 --- a/src/Daemon/GitHash.cpp.in +++ b/src/Daemon/GitHash.cpp.in @@ -1,8 +1,6 @@ /// This file was autogenerated by CMake -#include - -String getGitHash() +const char * getGitHash() { return "@GIT_HASH@"; } From cb74f52c5cec99d674068b7304d150c2a5bd1e23 Mon Sep 17 00:00:00 2001 From: Alexey Milovidov Date: Tue, 14 Nov 2023 06:27:39 +0100 Subject: [PATCH 97/99] Make autogenerated file as light as possible --- src/Daemon/BaseDaemon.cpp | 4 ++-- src/Daemon/GitHash.cpp.in | 5 +---- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git 
a/src/Daemon/BaseDaemon.cpp b/src/Daemon/BaseDaemon.cpp index 7d19b935c2d..d66bdf3583f 100644 --- a/src/Daemon/BaseDaemon.cpp +++ b/src/Daemon/BaseDaemon.cpp @@ -967,7 +967,7 @@ static void blockSignals(const std::vector & signals) throw Poco::Exception("Cannot block signal."); } -extern const char * getGitHash(); +extern const char * GIT_HASH; void BaseDaemon::initializeTerminationAndSignalProcessing() { @@ -1007,7 +1007,7 @@ void BaseDaemon::initializeTerminationAndSignalProcessing() build_id = ""; #endif - git_hash = getGitHash(); + git_hash = GIT_HASH; #if defined(OS_LINUX) std::string executable_path = getExecutablePath(); diff --git a/src/Daemon/GitHash.cpp.in b/src/Daemon/GitHash.cpp.in index 65937af8848..c5990531449 100644 --- a/src/Daemon/GitHash.cpp.in +++ b/src/Daemon/GitHash.cpp.in @@ -1,6 +1,3 @@ /// This file was autogenerated by CMake -const char * getGitHash() -{ - return "@GIT_HASH@"; -} +const char * GIT_HASH = "@GIT_HASH@"; From 80bbe7dcc6a3d6f381325ae93a1a59f41904fe91 Mon Sep 17 00:00:00 2001 From: Alexey Milovidov Date: Tue, 14 Nov 2023 06:55:06 +0100 Subject: [PATCH 98/99] Less CMake checks --- base/glibc-compatibility/CMakeLists.txt | 8 -- cmake/cpu_features.cmake | 180 ++++-------------------- contrib/aws-cmake/AwsFeatureTests.cmake | 115 +-------------- contrib/aws-cmake/AwsSIMD.cmake | 51 +------ contrib/libunwind-cmake/CMakeLists.txt | 23 +-- 5 files changed, 38 insertions(+), 339 deletions(-) diff --git a/base/glibc-compatibility/CMakeLists.txt b/base/glibc-compatibility/CMakeLists.txt index 0539f0c231d..65677ed2cf3 100644 --- a/base/glibc-compatibility/CMakeLists.txt +++ b/base/glibc-compatibility/CMakeLists.txt @@ -5,9 +5,6 @@ if (GLIBC_COMPATIBILITY) endif() enable_language(ASM) - include(CheckIncludeFile) - - check_include_file("sys/random.h" HAVE_SYS_RANDOM_H) add_headers_and_sources(glibc_compatibility .) 
add_headers_and_sources(glibc_compatibility musl) @@ -21,11 +18,6 @@ if (GLIBC_COMPATIBILITY) message (FATAL_ERROR "glibc_compatibility can only be used on x86_64 or aarch64.") endif () - list(REMOVE_ITEM glibc_compatibility_sources musl/getentropy.c) - if(HAVE_SYS_RANDOM_H) - list(APPEND glibc_compatibility_sources musl/getentropy.c) - endif() - # Need to omit frame pointers to match the performance of glibc set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fomit-frame-pointer") diff --git a/cmake/cpu_features.cmake b/cmake/cpu_features.cmake index 9fc3960c166..765e36403ad 100644 --- a/cmake/cpu_features.cmake +++ b/cmake/cpu_features.cmake @@ -1,10 +1,5 @@ # https://software.intel.com/sites/landingpage/IntrinsicsGuide/ -include (CheckCXXSourceCompiles) -include (CMakePushCheckState) - -cmake_push_check_state () - # The variables HAVE_* determine if compiler has support for the flag to use the corresponding instruction set. # The options ENABLE_* determine if we will tell compiler to actually use the corresponding instruction set if compiler can do it. @@ -137,178 +132,53 @@ elseif (ARCH_AMD64) endif() # ClickHouse can be cross-compiled (e.g. on an ARM host for x86) but it is also possible to build ClickHouse on x86 w/o AVX for x86 w/ - # AVX. We only check that the compiler can emit certain SIMD instructions, we don't care if the host system is able to run the binary. - # Therefore, use check_cxx_source_compiles (= does the code compile+link?) instead of check_cxx_source_runs (= does the code - # compile+link+run). + # AVX. We only assume that the compiler can emit certain SIMD instructions, we don't care if the host system is able to run the binary. 
+ + SET (HAVE_SSSE3 1) + SET (HAVE_SSE41 1) + SET (HAVE_SSE42 1) + SET (HAVE_PCLMULQDQ 1) + SET (HAVE_POPCNT 1) + SET (HAVE_AVX 1) + SET (HAVE_AVX2 1) + SET (HAVE_AVX512 1) + SET (HAVE_AVX512_VBMI 1) + SET (HAVE_BMI 1) + SET (HAVE_BMI2 1) - set (TEST_FLAG "-mssse3") - set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0") - check_cxx_source_compiles(" - #include - int main() { - __m64 a = _mm_abs_pi8(__m64()); - (void)a; - return 0; - } - " HAVE_SSSE3) if (HAVE_SSSE3 AND ENABLE_SSSE3) - set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}") + set (COMPILER_FLAGS "${COMPILER_FLAGS} -mssse3") endif () - - set (TEST_FLAG "-msse4.1") - set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0") - check_cxx_source_compiles(" - #include - int main() { - auto a = _mm_insert_epi8(__m128i(), 0, 0); - (void)a; - return 0; - } - " HAVE_SSE41) if (HAVE_SSE41 AND ENABLE_SSE41) - set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}") + set (COMPILER_FLAGS "${COMPILER_FLAGS} -msse4.1") endif () - - set (TEST_FLAG "-msse4.2") - set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0") - check_cxx_source_compiles(" - #include - int main() { - auto a = _mm_crc32_u64(0, 0); - (void)a; - return 0; - } - " HAVE_SSE42) if (HAVE_SSE42 AND ENABLE_SSE42) - set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}") + set (COMPILER_FLAGS "${COMPILER_FLAGS} -msse4.2") endif () - - set (TEST_FLAG "-mpclmul") - set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0") - check_cxx_source_compiles(" - #include - int main() { - auto a = _mm_clmulepi64_si128(__m128i(), __m128i(), 0); - (void)a; - return 0; - } - " HAVE_PCLMULQDQ) if (HAVE_PCLMULQDQ AND ENABLE_PCLMULQDQ) - set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}") + set (COMPILER_FLAGS "${COMPILER_FLAGS} -mpclmul") endif () - - set (TEST_FLAG "-mpopcnt") - set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0") - check_cxx_source_compiles(" - int main() { - auto a = __builtin_popcountll(0); - (void)a; - return 0; - } - " HAVE_POPCNT) if (HAVE_POPCNT AND ENABLE_POPCNT) - set (COMPILER_FLAGS 
"${COMPILER_FLAGS} ${TEST_FLAG}") + set (COMPILER_FLAGS "${COMPILER_FLAGS} -mpopcnt") endif () - - set (TEST_FLAG "-mavx") - set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0") - check_cxx_source_compiles(" - #include - int main() { - auto a = _mm256_insert_epi8(__m256i(), 0, 0); - (void)a; - return 0; - } - " HAVE_AVX) if (HAVE_AVX AND ENABLE_AVX) - set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}") + set (COMPILER_FLAGS "${COMPILER_FLAGS} -mavx") endif () - - set (TEST_FLAG "-mavx2") - set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0") - check_cxx_source_compiles(" - #include - int main() { - auto a = _mm256_add_epi16(__m256i(), __m256i()); - (void)a; - return 0; - } - " HAVE_AVX2) if (HAVE_AVX2 AND ENABLE_AVX2) - set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}") + set (COMPILER_FLAGS "${COMPILER_FLAGS} -mavx2") endif () - - set (TEST_FLAG "-mavx512f -mavx512bw -mavx512vl") - set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0") - check_cxx_source_compiles(" - #include - int main() { - auto a = _mm512_setzero_epi32(); - (void)a; - auto b = _mm512_add_epi16(__m512i(), __m512i()); - (void)b; - auto c = _mm_cmp_epi8_mask(__m128i(), __m128i(), 0); - (void)c; - return 0; - } - " HAVE_AVX512) if (HAVE_AVX512 AND ENABLE_AVX512) - set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}") + set (COMPILER_FLAGS "${COMPILER_FLAGS} -mavx512f -mavx512bw -mavx512vl") endif () - - set (TEST_FLAG "-mavx512vbmi") - set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0") - check_cxx_source_compiles(" - #include - int main() { - auto a = _mm512_permutexvar_epi8(__m512i(), __m512i()); - (void)a; - return 0; - } - " HAVE_AVX512_VBMI) if (HAVE_AVX512 AND ENABLE_AVX512 AND HAVE_AVX512_VBMI AND ENABLE_AVX512_VBMI) - set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}") + set (COMPILER_FLAGS "${COMPILER_FLAGS} -mavx512vbmi") endif () - - set (TEST_FLAG "-mbmi") - set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0") - check_cxx_source_compiles(" - #include - int main() { - auto a = _blsr_u32(0); - (void)a; - return 0; - 
} - " HAVE_BMI) if (HAVE_BMI AND ENABLE_BMI) - set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}") + set (COMPILER_FLAGS "${COMPILER_FLAGS} -mbmi") endif () - - set (TEST_FLAG "-mbmi2") - set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0") - check_cxx_source_compiles(" - #include - int main() { - auto a = _pdep_u64(0, 0); - (void)a; - return 0; - } - " HAVE_BMI2) if (HAVE_BMI2 AND HAVE_AVX2 AND ENABLE_AVX2 AND ENABLE_BMI2) - set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}") + set (COMPILER_FLAGS "${COMPILER_FLAGS} -mbmi2") endif () - - # Limit avx2/avx512 flag for specific source build - set (X86_INTRINSICS_FLAGS "") - if (ENABLE_AVX2_FOR_SPEC_OP) - if (HAVE_BMI) - set (X86_INTRINSICS_FLAGS "${X86_INTRINSICS_FLAGS} -mbmi") - endif () - if (HAVE_AVX AND HAVE_AVX2) - set (X86_INTRINSICS_FLAGS "${X86_INTRINSICS_FLAGS} -mavx -mavx2") - endif () - endif () - if (ENABLE_AVX512_FOR_SPEC_OP) set (X86_INTRINSICS_FLAGS "") if (HAVE_BMI) @@ -321,5 +191,3 @@ elseif (ARCH_AMD64) else () # RISC-V + exotic platforms endif () - -cmake_pop_check_state () diff --git a/contrib/aws-cmake/AwsFeatureTests.cmake b/contrib/aws-cmake/AwsFeatureTests.cmake index 54727e08d6b..e58b6634f42 100644 --- a/contrib/aws-cmake/AwsFeatureTests.cmake +++ b/contrib/aws-cmake/AwsFeatureTests.cmake @@ -1,114 +1,13 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0. -include(CheckCSourceRuns) - option(USE_CPU_EXTENSIONS "Whenever possible, use functions optimized for CPUs with specific extensions (ex: SSE, AVX)." ON) -# In the current (11/2/21) state of mingw64, the packaged gcc is not capable of emitting properly aligned avx2 instructions under certain circumstances. -# This leads to crashes for windows builds using mingw64 when invoking the avx2-enabled versions of certain functions. Until we can find a better -# work-around, disable avx2 (and all other extensions) in mingw builds. 
-# -# https://gcc.gnu.org/bugzilla/show_bug.cgi?id=54412 -# -if (MINGW) - message(STATUS "MINGW detected! Disabling avx2 and other CPU extensions") - set(USE_CPU_EXTENSIONS OFF) -endif() +if (ARCH_AMD64) + set (AWS_ARCH_INTEL 1) +elseif (ARCH_AARCH64) + set (AWS_ARCH_ARM64 1) +endif () -if(NOT CMAKE_CROSSCOMPILING) - check_c_source_runs(" - #include - bool foo(int a, int b, int *c) { - return __builtin_mul_overflow(a, b, c); - } - - int main() { - int out; - if (foo(1, 2, &out)) { - return 0; - } - - return 0; - }" AWS_HAVE_GCC_OVERFLOW_MATH_EXTENSIONS) - - if (USE_CPU_EXTENSIONS) - check_c_source_runs(" - int main() { - int foo = 42; - _mulx_u32(1, 2, &foo); - return foo != 2; - }" AWS_HAVE_MSVC_MULX) - endif() - -endif() - -check_c_source_compiles(" - #include - #if WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_DESKTOP) - int main() { - return 0; - } - #else - it's not windows desktop - #endif -" AWS_HAVE_WINAPI_DESKTOP) - -check_c_source_compiles(" - int main() { -#if !(defined(__x86_64__) || defined(__i386__) || defined(_M_X64) || defined(_M_IX86)) -# error \"not intel\" -#endif - return 0; - } -" AWS_ARCH_INTEL) - -check_c_source_compiles(" - int main() { -#if !(defined(__aarch64__) || defined(_M_ARM64)) -# error \"not arm64\" -#endif - return 0; - } -" AWS_ARCH_ARM64) - -check_c_source_compiles(" - int main() { -#if !(defined(__arm__) || defined(_M_ARM)) -# error \"not arm\" -#endif - return 0; - } -" AWS_ARCH_ARM32) - -check_c_source_compiles(" -int main() { - int foo = 42, bar = 24; - __asm__ __volatile__(\"\":\"=r\"(foo):\"r\"(bar):\"memory\"); -}" AWS_HAVE_GCC_INLINE_ASM) - -check_c_source_compiles(" -#include -int main() { -#ifdef __linux__ - getauxval(AT_HWCAP); - getauxval(AT_HWCAP2); -#endif - return 0; -}" AWS_HAVE_AUXV) - -string(REGEX MATCH "^(aarch64|arm)" ARM_CPU "${CMAKE_SYSTEM_PROCESSOR}") -if(NOT LEGACY_COMPILER_SUPPORT OR ARM_CPU) - check_c_source_compiles(" - #include - int main() { - backtrace(NULL, 0); - return 0; - }" AWS_HAVE_EXECINFO) 
-endif() - -check_c_source_compiles(" -#include -int main() { - return 1; -}" AWS_HAVE_LINUX_IF_LINK_H) +set (AWS_HAVE_GCC_INLINE_ASM 1) +set (AWS_HAVE_AUXV 1) diff --git a/contrib/aws-cmake/AwsSIMD.cmake b/contrib/aws-cmake/AwsSIMD.cmake index bd6f4064e78..a2f50f27d4e 100644 --- a/contrib/aws-cmake/AwsSIMD.cmake +++ b/contrib/aws-cmake/AwsSIMD.cmake @@ -1,54 +1,13 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0. -include(CheckCCompilerFlag) -include(CheckIncludeFile) - if (USE_CPU_EXTENSIONS) - if (MSVC) - check_c_compiler_flag("/arch:AVX2" HAVE_M_AVX2_FLAG) - if (HAVE_M_AVX2_FLAG) - set(AVX2_CFLAGS "/arch:AVX2") - endif() - else() - check_c_compiler_flag(-mavx2 HAVE_M_AVX2_FLAG) - if (HAVE_M_AVX2_FLAG) - set(AVX2_CFLAGS "-mavx -mavx2") - endif() + if (HAVE_AVX2) + set (AVX2_CFLAGS "-mavx -mavx2") + set (HAVE_AVX2_INTRINSICS 1) + set (HAVE_MM256_EXTRACT_EPI64 1) endif() - - - cmake_push_check_state() - set(CMAKE_REQUIRED_FLAGS "${CMAKE_REQUIRED_FLAGS} ${AVX2_CFLAGS}") - - check_c_source_compiles(" - #include - #include - #include - - int main() { - __m256i vec; - memset(&vec, 0, sizeof(vec)); - - _mm256_shuffle_epi8(vec, vec); - _mm256_set_epi32(1,2,3,4,5,6,7,8); - _mm256_permutevar8x32_epi32(vec, vec); - - return 0; - }" HAVE_AVX2_INTRINSICS) - - check_c_source_compiles(" - #include - #include - - int main() { - __m256i vec; - memset(&vec, 0, sizeof(vec)); - return (int)_mm256_extract_epi64(vec, 2); - }" HAVE_MM256_EXTRACT_EPI64) - - cmake_pop_check_state() -endif() # USE_CPU_EXTENSIONS +endif() macro(simd_add_definition_if target definition) if(${definition}) diff --git a/contrib/libunwind-cmake/CMakeLists.txt b/contrib/libunwind-cmake/CMakeLists.txt index 733f99d07f5..0d872bae5d1 100644 --- a/contrib/libunwind-cmake/CMakeLists.txt +++ b/contrib/libunwind-cmake/CMakeLists.txt @@ -1,6 +1,3 @@ -include(CheckCCompilerFlag) -include(CheckCXXCompilerFlag) - set(LIBUNWIND_SOURCE_DIR 
"${ClickHouse_SOURCE_DIR}/contrib/libunwind") set(LIBUNWIND_CXX_SOURCES @@ -48,27 +45,11 @@ target_compile_definitions(unwind PRIVATE -D_LIBUNWIND_NO_HEAP=1 -D_DEBUG -D_LIB # and disable sanitizers (otherwise infinite loop may happen) target_compile_options(unwind PRIVATE -O3 -fno-exceptions -funwind-tables -fno-sanitize=all $<$:-nostdinc++ -fno-rtti>) -check_c_compiler_flag(-Wunused-but-set-variable HAVE_WARNING_UNUSED_BUT_SET_VARIABLE) -if (HAVE_WARNING_UNUSED_BUT_SET_VARIABLE) - target_compile_options(unwind PRIVATE -Wno-unused-but-set-variable) -endif () - -check_cxx_compiler_flag(-Wmissing-attributes HAVE_WARNING_MISSING_ATTRIBUTES) -if (HAVE_WARNING_MISSING_ATTRIBUTES) - target_compile_options(unwind PRIVATE -Wno-missing-attributes) -endif () - -check_cxx_compiler_flag(-Wmaybe-uninitialized HAVE_WARNING_MAYBE_UNINITIALIZED) -if (HAVE_WARNING_MAYBE_UNINITIALIZED) - target_compile_options(unwind PRIVATE -Wno-maybe-uninitialized) -endif () +target_compile_options(unwind PRIVATE -Wno-unused-but-set-variable) # The library is using register variables that are bound to specific registers # Example: DwarfInstructions.hpp: register unsigned long long x16 __asm("x16") = cfa; -check_cxx_compiler_flag(-Wregister HAVE_WARNING_REGISTER) -if (HAVE_WARNING_REGISTER) - target_compile_options(unwind PRIVATE "$<$:-Wno-register>") -endif () +target_compile_options(unwind PRIVATE "$<$:-Wno-register>") install( TARGETS unwind From 4e3e8e32fde948d69ad78fb7f7cf9fe293f55b91 Mon Sep 17 00:00:00 2001 From: Alexey Milovidov Date: Tue, 14 Nov 2023 07:24:41 +0100 Subject: [PATCH 99/99] Remove more garbage --- contrib/aws-cmake/AwsThreadAffinity.cmake | 45 +------------ contrib/aws-cmake/AwsThreadName.cmake | 52 +-------------- contrib/libcxx-cmake/CMakeLists.txt | 2 - contrib/rocksdb-cmake/CMakeLists.txt | 80 ++--------------------- contrib/xz-cmake/CMakeLists.txt | 2 - utils/check-style/check-style | 3 + 6 files changed, 13 insertions(+), 171 deletions(-) diff --git 
a/contrib/aws-cmake/AwsThreadAffinity.cmake b/contrib/aws-cmake/AwsThreadAffinity.cmake index 9e53481272c..7f30fb71b43 100644 --- a/contrib/aws-cmake/AwsThreadAffinity.cmake +++ b/contrib/aws-cmake/AwsThreadAffinity.cmake @@ -1,50 +1,9 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0. -include(CheckSymbolExists) - # Check if the platform supports setting thread affinity # (important for hitting full NIC entitlement on NUMA architectures) function(aws_set_thread_affinity_method target) - - # Non-POSIX, Android, and Apple platforms do not support thread affinity. - if (NOT UNIX OR ANDROID OR APPLE) - target_compile_definitions(${target} PRIVATE - -DAWS_AFFINITY_METHOD=AWS_AFFINITY_METHOD_NONE) - return() - endif() - - cmake_push_check_state() - list(APPEND CMAKE_REQUIRED_DEFINITIONS -D_GNU_SOURCE) - list(APPEND CMAKE_REQUIRED_LIBRARIES pthread) - - set(headers "pthread.h") - # BSDs put nonportable pthread declarations in a separate header. - if(CMAKE_SYSTEM_NAME MATCHES BSD) - set(headers "${headers};pthread_np.h") - endif() - - # Using pthread attrs is the preferred method, but is glibc-specific. - check_symbol_exists(pthread_attr_setaffinity_np "${headers}" USE_PTHREAD_ATTR_SETAFFINITY) - if (USE_PTHREAD_ATTR_SETAFFINITY) - target_compile_definitions(${target} PRIVATE - -DAWS_AFFINITY_METHOD=AWS_AFFINITY_METHOD_PTHREAD_ATTR) - return() - endif() - - # This method is still nonportable, but is supported by musl and BSDs. - check_symbol_exists(pthread_setaffinity_np "${headers}" USE_PTHREAD_SETAFFINITY) - if (USE_PTHREAD_SETAFFINITY) - target_compile_definitions(${target} PRIVATE - -DAWS_AFFINITY_METHOD=AWS_AFFINITY_METHOD_PTHREAD) - return() - endif() - - # If we got here, we expected thread affinity support but didn't find it. - # We still build with degraded NUMA performance, but show a warning. 
- message(WARNING "No supported method for setting thread affinity") - target_compile_definitions(${target} PRIVATE - -DAWS_AFFINITY_METHOD=AWS_AFFINITY_METHOD_NONE) - - cmake_pop_check_state() + # This code has been cut, because I don't care about it. + target_compile_definitions(${target} PRIVATE -DAWS_AFFINITY_METHOD=AWS_AFFINITY_METHOD_NONE) endfunction() diff --git a/contrib/aws-cmake/AwsThreadName.cmake b/contrib/aws-cmake/AwsThreadName.cmake index a67416b4f83..e17759435ed 100644 --- a/contrib/aws-cmake/AwsThreadName.cmake +++ b/contrib/aws-cmake/AwsThreadName.cmake @@ -1,61 +1,13 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0. -include(CheckSymbolExists) - # Check how the platform supports setting thread name function(aws_set_thread_name_method target) - - if (WINDOWS) - # On Windows we do a runtime check, instead of compile-time check - return() - elseif (APPLE) + if (APPLE) # All Apple platforms we support have the same function, so no need for compile-time check. return() endif() - cmake_push_check_state() - list(APPEND CMAKE_REQUIRED_DEFINITIONS -D_GNU_SOURCE) - list(APPEND CMAKE_REQUIRED_LIBRARIES pthread) - - # The start of the test program - set(c_source_start " - #define _GNU_SOURCE - #include - - #if defined(__FreeBSD__) || defined(__NETBSD__) - #include - #endif - - int main() { - pthread_t thread_id; - ") - - # The end of the test program - set(c_source_end "}") - # pthread_setname_np() usually takes 2 args - check_c_source_compiles(" - ${c_source_start} - pthread_setname_np(thread_id, \"asdf\"); - ${c_source_end}" - PTHREAD_SETNAME_TAKES_2ARGS) - if (PTHREAD_SETNAME_TAKES_2ARGS) - target_compile_definitions(${target} PRIVATE -DAWS_PTHREAD_SETNAME_TAKES_2ARGS) - return() - endif() - - # But on NetBSD it takes 3! 
- check_c_source_compiles(" - ${c_source_start} - pthread_setname_np(thread_id, \"asdf\", NULL); - ${c_source_end} - " PTHREAD_SETNAME_TAKES_3ARGS) - if (PTHREAD_SETNAME_TAKES_3ARGS) - target_compile_definitions(${target} PRIVATE -DAWS_PTHREAD_SETNAME_TAKES_3ARGS) - return() - endif() - - # And on many older/weirder platforms it's just not supported - cmake_pop_check_state() + target_compile_definitions(${target} PRIVATE -DAWS_PTHREAD_SETNAME_TAKES_2ARGS) endfunction() diff --git a/contrib/libcxx-cmake/CMakeLists.txt b/contrib/libcxx-cmake/CMakeLists.txt index b7e59e2c9a3..c77d5d8319e 100644 --- a/contrib/libcxx-cmake/CMakeLists.txt +++ b/contrib/libcxx-cmake/CMakeLists.txt @@ -1,5 +1,3 @@ -include(CheckCXXCompilerFlag) - set(LIBCXX_SOURCE_DIR "${ClickHouse_SOURCE_DIR}/contrib/llvm-project/libcxx") set(SRCS diff --git a/contrib/rocksdb-cmake/CMakeLists.txt b/contrib/rocksdb-cmake/CMakeLists.txt index 466adf6aff0..2b6c48f0b38 100644 --- a/contrib/rocksdb-cmake/CMakeLists.txt +++ b/contrib/rocksdb-cmake/CMakeLists.txt @@ -76,7 +76,6 @@ else() endif() endif() -include(CheckCCompilerFlag) if(CMAKE_SYSTEM_PROCESSOR MATCHES "^(powerpc|ppc)64") if(POWER9) set(HAS_POWER9 1) @@ -88,21 +87,12 @@ if(CMAKE_SYSTEM_PROCESSOR MATCHES "^(powerpc|ppc)64") endif(CMAKE_SYSTEM_PROCESSOR MATCHES "^(powerpc|ppc)64") if(CMAKE_SYSTEM_PROCESSOR MATCHES "aarch64|AARCH64|arm64|ARM64") - CHECK_C_COMPILER_FLAG("-march=armv8-a+crc+crypto" HAS_ARMV8_CRC) - if(HAS_ARMV8_CRC) - message(STATUS " HAS_ARMV8_CRC yes") - set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -march=armv8-a+crc+crypto -Wno-unused-function") - set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -march=armv8-a+crc+crypto -Wno-unused-function") - endif(HAS_ARMV8_CRC) + set(HAS_ARMV8_CRC 1) + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -march=armv8-a+crc+crypto -Wno-unused-function") + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -march=armv8-a+crc+crypto -Wno-unused-function") endif(CMAKE_SYSTEM_PROCESSOR MATCHES "aarch64|AARCH64|arm64|ARM64") 
-include(CheckCXXSourceCompiles) -if(NOT MSVC) - set(CMAKE_REQUIRED_FLAGS "-msse4.2 -mpclmul") -endif() - -unset(CMAKE_REQUIRED_FLAGS) if(HAVE_SSE42) add_definitions(-DHAVE_SSE42) add_definitions(-DHAVE_PCLMUL) @@ -121,75 +111,18 @@ elseif(CMAKE_SYSTEM_NAME MATCHES "Linux") add_definitions(-DOS_LINUX) elseif(CMAKE_SYSTEM_NAME MATCHES "SunOS") add_definitions(-DOS_SOLARIS) -elseif(CMAKE_SYSTEM_NAME MATCHES "kFreeBSD") - add_definitions(-DOS_GNU_KFREEBSD) elseif(CMAKE_SYSTEM_NAME MATCHES "FreeBSD") add_definitions(-DOS_FREEBSD) -elseif(CMAKE_SYSTEM_NAME MATCHES "NetBSD") - add_definitions(-DOS_NETBSD) -elseif(CMAKE_SYSTEM_NAME MATCHES "OpenBSD") - add_definitions(-DOS_OPENBSD) -elseif(CMAKE_SYSTEM_NAME MATCHES "DragonFly") - add_definitions(-DOS_DRAGONFLYBSD) elseif(CMAKE_SYSTEM_NAME MATCHES "Android") add_definitions(-DOS_ANDROID) -elseif(CMAKE_SYSTEM_NAME MATCHES "Windows") - add_definitions(-DWIN32 -DOS_WIN -D_MBCS -DWIN64 -DNOMINMAX) - if(MINGW) - add_definitions(-D_WIN32_WINNT=_WIN32_WINNT_VISTA) - endif() endif() -if(NOT WIN32) - add_definitions(-DROCKSDB_PLATFORM_POSIX -DROCKSDB_LIB_IO_POSIX) -endif() +add_definitions(-DROCKSDB_PLATFORM_POSIX -DROCKSDB_LIB_IO_POSIX) -option(WITH_FALLOCATE "build with fallocate" ON) -if(WITH_FALLOCATE) - CHECK_C_SOURCE_COMPILES(" -#include -#include -int main() { - int fd = open(\"/dev/null\", 0); - fallocate(fd, FALLOC_FL_KEEP_SIZE, 0, 1024); -} -" HAVE_FALLOCATE) - if(HAVE_FALLOCATE) - add_definitions(-DROCKSDB_FALLOCATE_PRESENT) - endif() -endif() - -CHECK_C_SOURCE_COMPILES(" -#include -int main() { - int fd = open(\"/dev/null\", 0); - sync_file_range(fd, 0, 1024, SYNC_FILE_RANGE_WRITE); -} -" HAVE_SYNC_FILE_RANGE_WRITE) -if(HAVE_SYNC_FILE_RANGE_WRITE) - add_definitions(-DROCKSDB_RANGESYNC_PRESENT) -endif() - -CHECK_C_SOURCE_COMPILES(" -#include -int main() { - (void) PTHREAD_MUTEX_ADAPTIVE_NP; -} -" HAVE_PTHREAD_MUTEX_ADAPTIVE_NP) -if(HAVE_PTHREAD_MUTEX_ADAPTIVE_NP) +if (OS_LINUX OR OS_FREEBSD) 
add_definitions(-DROCKSDB_PTHREAD_ADAPTIVE_MUTEX) endif() -include(CheckCXXSymbolExists) -if (OS_FREEBSD) - check_cxx_symbol_exists(malloc_usable_size "${ROCKSDB_SOURCE_DIR}/malloc_np.h" HAVE_MALLOC_USABLE_SIZE) -else() - check_cxx_symbol_exists(malloc_usable_size "${ROCKSDB_SOURCE_DIR}/malloc.h" HAVE_MALLOC_USABLE_SIZE) -endif() -if(HAVE_MALLOC_USABLE_SIZE) - add_definitions(-DROCKSDB_MALLOC_USABLE_SIZE) -endif() - if (OS_LINUX) add_definitions(-DROCKSDB_SCHED_GETCPU_PRESENT) add_definitions(-DROCKSDB_AUXV_SYSAUXV_PRESENT) @@ -204,7 +137,6 @@ include_directories("${ROCKSDB_SOURCE_DIR}/include") if(WITH_FOLLY_DISTRIBUTED_MUTEX) include_directories("${ROCKSDB_SOURCE_DIR}/third-party/folly") endif() -find_package(Threads REQUIRED) # Main library source code @@ -497,7 +429,7 @@ set(SOURCES ${ROCKSDB_SOURCE_DIR}/utilities/transactions/lock/range/range_tree/lib/util/memarena.cc rocksdb_build_version.cc) -if(HAVE_SSE42 AND NOT MSVC) +if(HAVE_SSE42) set_source_files_properties( "${ROCKSDB_SOURCE_DIR}/util/crc32c.cc" PROPERTIES COMPILE_FLAGS "-msse4.2 -mpclmul") diff --git a/contrib/xz-cmake/CMakeLists.txt b/contrib/xz-cmake/CMakeLists.txt index c3a8203c83e..c73433d9863 100644 --- a/contrib/xz-cmake/CMakeLists.txt +++ b/contrib/xz-cmake/CMakeLists.txt @@ -98,8 +98,6 @@ if (ARCH_S390X) add_compile_definitions(WORDS_BIGENDIAN) endif () -find_package(Threads REQUIRED) - add_library(_liblzma ${SRC_DIR}/src/common/mythread.h diff --git a/utils/check-style/check-style b/utils/check-style/check-style index f87d2e292b5..bd6f111354e 100755 --- a/utils/check-style/check-style +++ b/utils/check-style/check-style @@ -422,3 +422,6 @@ find $ROOT_PATH/{src,programs,utils} -name '*.h' -or -name '*.cpp' | xargs grep # Cyrillic characters hiding inside Latin. find $ROOT_PATH/{src,programs,utils} -name '*.h' -or -name '*.cpp' | xargs grep -P --line-number '[a-zA-Z][а-яА-ЯёЁ]|[а-яА-ЯёЁ][a-zA-Z]' && echo "^ Cyrillic characters found in unexpected place." 
+ +# Don't allow dynamic compiler check with CMake, because we are using hermetic, reproducible, cross-compiled, static (TLDR, good) builds. +ls -1d $ROOT_PATH/contrib/*-cmake | xargs -I@ find @ -name 'CMakeLists.txt' -or -name '*.cmake' | xargs grep --with-filename -i -P 'check_c_compiler_flag|check_cxx_compiler_flag|check_c_source_compiles|check_cxx_source_compiles|check_include_file|check_symbol_exists|cmake_push_check_state|cmake_pop_check_state|find_package|CMAKE_REQUIRED_FLAGS|CheckIncludeFile|CheckCCompilerFlag|CheckCXXCompilerFlag|CheckCSourceCompiles|CheckCXXSourceCompiles|CheckCSymbolExists|CheckCXXSymbolExists' | grep -v Rust && echo "^ It's not allowed to have dynamic compiler checks with CMake."