diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 73b2155482e..74a6f95dbb3 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -48,8 +48,7 @@ At a minimum, the following information should be added (but add more as needed) - [ ] Allow: Stateful tests - [ ] Allow: Integration Tests - [ ] Allow: Performance tests -- [ ] Allow: Normal Builds -- [ ] Allow: Special Builds +- [ ] Allow: All Builds - [ ] Allow: All NOT Required Checks - [ ] Allow: batch 1, 2 for multi-batch jobs - [ ] Allow: batch 3, 4, 5, 6 for multi-batch jobs diff --git a/.github/workflows/backport_branches.yml b/.github/workflows/backport_branches.yml index c8c6ba30b0b..e1980ec9ef2 100644 --- a/.github/workflows/backport_branches.yml +++ b/.github/workflows/backport_branches.yml @@ -70,7 +70,7 @@ jobs: if: ${{ !failure() && !cancelled() }} uses: ./.github/workflows/reusable_test.yml with: - test_name: Compatibility check (amd64) + test_name: Compatibility check (release) runner_type: style-checker data: ${{ needs.RunConfig.outputs.data }} CompatibilityCheckAarch64: @@ -194,7 +194,7 @@ jobs: if: ${{ !failure() && !cancelled() }} uses: ./.github/workflows/reusable_test.yml with: - test_name: Install packages (amd64) + test_name: Install packages (release) runner_type: style-checker data: ${{ needs.RunConfig.outputs.data }} run_command: | @@ -204,7 +204,7 @@ jobs: if: ${{ !failure() && !cancelled() }} uses: ./.github/workflows/reusable_test.yml with: - test_name: Install packages (arm64) + test_name: Install packages (aarch64) runner_type: style-checker-aarch64 data: ${{ needs.RunConfig.outputs.data }} run_command: | diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml index f5c78a6b6a1..88bc50a729d 100644 --- a/.github/workflows/master.yml +++ b/.github/workflows/master.yml @@ -115,25 +115,16 @@ jobs: data: ${{ needs.RunConfig.outputs.data }} ################################# Reports ################################# - # Reports should be run even if Builds_1/2 failed - put them separately in wf (not in Tests_1/2) - Builds_1_Report: + # Reports should run even if Builds_1/2 fail - run them separately, not in Tests_1/2/3 + Builds_Report: # run report check for failed builds to indicate the CI error if: ${{ !cancelled() && needs.RunConfig.result == 'success' && contains(fromJson(needs.RunConfig.outputs.data).jobs_data.jobs_to_do, 'ClickHouse build check') }} - needs: [RunConfig, Builds_1] + needs: [RunConfig, Builds_1, Builds_2] uses: ./.github/workflows/reusable_test.yml with: test_name: ClickHouse build check runner_type: style-checker-aarch64 data: ${{ needs.RunConfig.outputs.data }} - Builds_2_Report: - # run report check for failed builds to indicate the CI error - if: ${{ !cancelled() && needs.RunConfig.result == 'success' && contains(fromJson(needs.RunConfig.outputs.data).jobs_data.jobs_to_do, 'ClickHouse special build check') }} - needs: [RunConfig, Builds_2] - uses: ./.github/workflows/reusable_test.yml - with: - test_name: ClickHouse special build check - runner_type: style-checker-aarch64 - data: ${{ needs.RunConfig.outputs.data }} MarkReleaseReady: if: ${{ !failure() && !cancelled() }} @@ -165,7 +156,7 @@ jobs: FinishCheck: if: ${{ !cancelled() }} - needs: [RunConfig, Builds_1, Builds_2, Builds_1_Report, Builds_2_Report, Tests_1, Tests_2, Tests_3] + needs: [RunConfig, Builds_1, Builds_2, Builds_Report, Tests_1, Tests_2, Tests_3] runs-on: [self-hosted, style-checker-aarch64] steps: - name: Check out repository code diff --git 
a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index 66ca3381a40..70b71da8fa5 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -143,29 +143,20 @@ jobs: data: ${{ needs.RunConfig.outputs.data }} ################################# Reports ################################# - # Reports should by run even if Builds_1/2 fail, so put them separately in wf (not in Tests_1/2) - Builds_1_Report: + # Reports should run even if Builds_1/2 fail - run them separately (not in Tests_1/2/3) + Builds_Report: # run report check for failed builds to indicate the CI error - if: ${{ !cancelled() && needs.StyleCheck.result == 'success' && contains(fromJson(needs.RunConfig.outputs.data).jobs_data.jobs_to_do, 'ClickHouse build check') }} - needs: [RunConfig, StyleCheck, Builds_1] + if: ${{ !cancelled() && needs.RunConfig.result == 'success' && contains(fromJson(needs.RunConfig.outputs.data).jobs_data.jobs_to_do, 'ClickHouse build check') }} + needs: [RunConfig, StyleCheck, Builds_1, Builds_2] uses: ./.github/workflows/reusable_test.yml with: test_name: ClickHouse build check runner_type: style-checker-aarch64 data: ${{ needs.RunConfig.outputs.data }} - Builds_2_Report: - # run report check for failed builds to indicate the CI error - if: ${{ !cancelled() && needs.StyleCheck.result == 'success' && contains(fromJson(needs.RunConfig.outputs.data).jobs_data.jobs_to_do, 'ClickHouse special build check') }} - needs: [RunConfig, StyleCheck, Builds_2] - uses: ./.github/workflows/reusable_test.yml - with: - test_name: ClickHouse special build check - runner_type: style-checker-aarch64 - data: ${{ needs.RunConfig.outputs.data }} CheckReadyForMerge: if: ${{ !cancelled() && needs.StyleCheck.result == 'success' }} - needs: [RunConfig, BuildDockers, StyleCheck, FastTest, Builds_1, Builds_2, Builds_1_Report, Builds_2_Report, Tests_1, Tests_2] + needs: [RunConfig, BuildDockers, StyleCheck, FastTest, Builds_1, Builds_2, Builds_Report, Tests_1, Tests_2] runs-on: [self-hosted, style-checker-aarch64] steps: - name: Check out repository code @@ -181,7 +172,7 @@ jobs: # FinishCheck: if: ${{ !cancelled() }} - needs: [RunConfig, BuildDockers, StyleCheck, FastTest, Builds_1, Builds_2, Builds_1_Report, Builds_2_Report, Tests_1, Tests_2, Tests_3] + needs: [RunConfig, BuildDockers, StyleCheck, FastTest, Builds_1, Builds_2, Builds_Report, Tests_1, Tests_2, Tests_3] runs-on: [self-hosted, style-checker-aarch64] steps: - name: Check out repository code diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index f9b8a4fa764..45eb7431bb4 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -65,7 +65,7 @@ jobs: if: ${{ !failure() && !cancelled() }} uses: ./.github/workflows/reusable_test.yml with: - test_name: Compatibility check (amd64) + test_name: Compatibility check (release) runner_type: style-checker data: ${{ needs.RunConfig.outputs.data }} CompatibilityCheckAarch64: @@ -244,7 +244,7 @@ jobs: if: ${{ !failure() && !cancelled() }} uses: ./.github/workflows/reusable_test.yml with: - test_name: Install packages (amd64) + test_name: Install packages (release) runner_type: style-checker data: ${{ needs.RunConfig.outputs.data }} run_command: | @@ -254,7 +254,7 @@ jobs: if: ${{ !failure() && !cancelled() }} uses: ./.github/workflows/reusable_test.yml with: - test_name: Install packages (arm64) + test_name: Install packages (aarch64) runner_type: style-checker-aarch64 data: ${{ 
needs.RunConfig.outputs.data }}
      run_command: |
diff --git a/docker/test/stateless/stress_tests.lib b/docker/test/stateless/stress_tests.lib
index 3b6ad244c82..c069ccbdd8d 100644
--- a/docker/test/stateless/stress_tests.lib
+++ b/docker/test/stateless/stress_tests.lib
@@ -89,10 +89,6 @@ function configure()
     # since we run clickhouse from root
     sudo chown root: /var/lib/clickhouse
 
-    # Set more frequent update period of asynchronous metrics to more frequently update information about real memory usage (less chance of OOM).
-    echo "1" \
-        > /etc/clickhouse-server/config.d/asynchronous_metrics_update_period_s.xml
-
     local total_mem
     total_mem=$(awk '/MemTotal/ { print $(NF-1) }' /proc/meminfo) # KiB
     total_mem=$(( total_mem*1024 )) # bytes
diff --git a/docs/en/interfaces/formats.md b/docs/en/interfaces/formats.md
index fdbfb742a10..ffdd7e2ca25 100644
--- a/docs/en/interfaces/formats.md
+++ b/docs/en/interfaces/formats.md
@@ -1490,6 +1490,8 @@ Differs from [PrettySpaceNoEscapes](#prettyspacenoescapes) in that up to 10,000
 - [output_format_pretty_color](/docs/en/operations/settings/settings-formats.md/#output_format_pretty_color) - use ANSI escape sequences to paint colors in Pretty formats. Default value - `true`.
 - [output_format_pretty_grid_charset](/docs/en/operations/settings/settings-formats.md/#output_format_pretty_grid_charset) - Charset for printing grid borders. Available charsets: ASCII, UTF-8. Default value - `UTF-8`.
 - [output_format_pretty_row_numbers](/docs/en/operations/settings/settings-formats.md/#output_format_pretty_row_numbers) - Add row numbers before each row for pretty output format. Default value - `true`.
+- [output_format_pretty_display_footer_column_names](/docs/en/operations/settings/settings-formats.md/#output_format_pretty_display_footer_column_names) - Display column names in the footer if the table contains many rows. Default value - `true`.
+- [output_format_pretty_display_footer_column_names_min_rows](/docs/en/operations/settings/settings-formats.md/#output_format_pretty_display_footer_column_names_min_rows) - Sets the minimum number of rows for which a footer will be displayed if [output_format_pretty_display_footer_column_names](/docs/en/operations/settings/settings-formats.md/#output_format_pretty_display_footer_column_names) is enabled. Default value - `50`.
 
 ## RowBinary {#rowbinary}
diff --git a/docs/en/interfaces/http.md b/docs/en/interfaces/http.md
index eb1a3ba1dbc..f5b6326fa96 100644
--- a/docs/en/interfaces/http.md
+++ b/docs/en/interfaces/http.md
@@ -508,7 +508,7 @@ Now `rule` can configure `method`, `headers`, `url`, `handler`:
 
 - `headers` are responsible for matching the header part of the HTTP request. It is compatible with RE2’s regular expressions. It is an optional configuration. If it is not defined in the configuration file, it does not match the header portion of the HTTP request.
 
-- `handler` contains the main processing part. Now `handler` can configure `type`, `status`, `content_type`, `response_content`, `query`, `query_param_name`.
+- `handler` contains the main processing part. Now `handler` can configure `type`, `status`, `content_type`, `http_response_headers`, `response_content`, `query`, `query_param_name`.
 `type` currently supports three types: [predefined_query_handler](#predefined_query_handler), [dynamic_query_handler](#dynamic_query_handler), [static](#static).
 
   - `query` — use with `predefined_query_handler` type, executes query when the handler is called.
@@ -519,6 +519,8 @@ Now `rule` can configure `method`, `headers`, `url`, `handler`:
 
   - `content_type` — use with any type, response [content-type](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Type).
 
+  - `http_response_headers` — use with any type; a map of response headers. It can also be used to set the content type instead of `content_type`.
+
   - `response_content` — use with `static` type, response content sent to client, when using the prefix ‘file://’ or ‘config://’, find the content from the file or configuration sends to client.
 
 Next are the configuration methods for different `type`.
@@ -616,6 +618,33 @@ Return a message.
         <type>static</type>
         <status>402</status>
         <content_type>text/html; charset=UTF-8</content_type>
+        <http_response_headers>
+            <Content-Language>en</Content-Language>
+            <X-My-Custom-Header>43</X-My-Custom-Header>
+        </http_response_headers>
         <response_content>Say Hi!</response_content>
     </handler>
 </rule>
 </http_handlers>
 ```
+
+`http_response_headers` can be used to set the content type instead of `content_type`.
+
+``` xml
+<http_handlers>
+    <rule>
+        <methods>GET</methods>
+        <headers><XXX>xxx</XXX></headers>
+        <url>/hi</url>
+        <handler>
+            <type>static</type>
+            <status>402</status>
+            <http_response_headers>
+                <Content-Type>text/html; charset=UTF-8</Content-Type>
+                <Content-Language>en</Content-Language>
+                <X-My-Custom-Header>43</X-My-Custom-Header>
+            </http_response_headers>
+            <response_content>Say Hi!</response_content>
+        </handler>
+    </rule>
+</http_handlers>
+```
@@ -696,6 +725,9 @@ Find the content from the file send to client.
         <type>static</type>
         <content_type>text/html; charset=UTF-8</content_type>
+        <http_response_headers>
+            <ETag>737060cd8c284d8af7ad3082f209582d</ETag>
+        </http_response_headers>
         <response_content>file:///absolute_path_file.html</response_content>
@@ -706,6 +738,9 @@ Find the content from the file send to client.
         <type>static</type>
         <content_type>text/html; charset=UTF-8</content_type>
+        <http_response_headers>
+            <ETag>737060cd8c284d8af7ad3082f209582d</ETag>
+        </http_response_headers>
         <response_content>file://./relative_path_file.html</response_content>
diff --git a/docs/en/operations/settings/settings-formats.md b/docs/en/operations/settings/settings-formats.md
index 6aae1ea62e5..670c9c6cbf1 100644
--- a/docs/en/operations/settings/settings-formats.md
+++ b/docs/en/operations/settings/settings-formats.md
@@ -1706,6 +1706,43 @@ Result:
 └────────────┘
 ```
 
+## output_format_pretty_display_footer_column_names
+
+Display column names in the footer if the table contains many rows.
+
+Possible values:
+
+- 0 — No column names are displayed in the footer.
+- 1 — Column names are displayed in the footer if the row count is greater than or equal to the threshold value set by [output_format_pretty_display_footer_column_names_min_rows](#output_format_pretty_display_footer_column_names_min_rows) (50 by default).
+
+Default value: `1`.
+
+**Example**
+
+Query:
+
+```sql
+SELECT *, toTypeName(*) FROM (SELECT * FROM system.numbers LIMIT 1000);
+```
+
+Result:
+
+```response
+      ┌─number─┬─toTypeName(number)─┐
+   1. │      0 │ UInt64             │
+   2. │      1 │ UInt64             │
+   3. │      2 │ UInt64             │
+  ...
+ 999. │    998 │ UInt64             │
+1000. │    999 │ UInt64             │
+      └─number─┴─toTypeName(number)─┘
+```
+
+## output_format_pretty_display_footer_column_names_min_rows
+
+Sets the minimum number of rows for which a footer with column names will be displayed if the setting [output_format_pretty_display_footer_column_names](#output_format_pretty_display_footer_column_names) is enabled.
+
+Default value: `50`.
+
 ## Template format settings {#template-format-settings}
 
 ### format_template_resultset {#format_template_resultset}
diff --git a/docs/en/operations/system-tables/asynchronous_metrics.md b/docs/en/operations/system-tables/asynchronous_metrics.md
index 81725b97e41..762d187917c 100644
--- a/docs/en/operations/system-tables/asynchronous_metrics.md
+++ b/docs/en/operations/system-tables/asynchronous_metrics.md
@@ -639,6 +639,10 @@ An internal metric of the low-level memory allocator (jemalloc). See https://jem
 
 An internal metric of the low-level memory allocator (jemalloc). See https://jemalloc.net/jemalloc.3.html
 
+### jemalloc.prof.active
+
+An internal metric of the low-level memory allocator (jemalloc). See https://jemalloc.net/jemalloc.3.html
+
 **See Also**
 
 - [Monitoring](../../operations/monitoring.md) — Base concepts of ClickHouse monitoring.
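The `jemalloc.prof.active` metric documented above is read through `system.asynchronous_metrics` like the other jemalloc gauges. A minimal sketch of checking it, assuming a server built with jemalloc (the metric name comes from this diff; the query itself is ordinary system-table SQL):

```sql
-- Check whether jemalloc heap profiling is currently active.
-- The value is expected to be 1 when profiling is on and 0 otherwise.
SELECT metric, value
FROM system.asynchronous_metrics
WHERE metric = 'jemalloc.prof.active';
```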
diff --git a/docs/en/sql-reference/data-types/float.md b/docs/en/sql-reference/data-types/float.md
index 23131d5b4fe..3c789076c1e 100644
--- a/docs/en/sql-reference/data-types/float.md
+++ b/docs/en/sql-reference/data-types/float.md
@@ -7,33 +7,43 @@ sidebar_label: Float32, Float64
 # Float32, Float64
 
 :::note
-If you need accurate calculations, in particular if you work with financial or business data requiring a high precision you should consider using Decimal instead. Floats might lead to inaccurate results as illustrated below:
+If you need accurate calculations, in particular if you work with financial or business data requiring a high precision, you should consider using [Decimal](../data-types/decimal.md) instead.
 
-```
+[Floating Point Numbers](https://en.wikipedia.org/wiki/IEEE_754) might lead to inaccurate results as illustrated below:
+
+```sql
 CREATE TABLE IF NOT EXISTS float_vs_decimal
 (
    my_float Float64,
    my_decimal Decimal64(3)
-)Engine=MergeTree ORDER BY tuple()
-
-INSERT INTO float_vs_decimal SELECT round(randCanonical(), 3) AS res, res FROM system.numbers LIMIT 1000000; # Generate 1 000 000 random number with 2 decimal places and store them as a float and as a decimal
+)
+Engine=MergeTree
+ORDER BY tuple();
+
+# Generate 1 000 000 random numbers with 2 decimal places and store them as a float and as a decimal
+INSERT INTO float_vs_decimal SELECT round(randCanonical(), 3) AS res, res FROM system.numbers LIMIT 1000000;
+```
+
+```
 SELECT sum(my_float), sum(my_decimal) FROM float_vs_decimal;
->	500279.56300000014	500279.563
+
+┌──────sum(my_float)─┬─sum(my_decimal)─┐
+│ 499693.60500000004 │      499693.605 │
+└────────────────────┴─────────────────┘
 
 SELECT sumKahan(my_float), sumKahan(my_decimal) FROM float_vs_decimal;
->	500279.563	500279.563
+
+┌─sumKahan(my_float)─┬─sumKahan(my_decimal)─┐
+│         499693.605 │           499693.605 │
+└────────────────────┴──────────────────────┘
 ```
 :::
 
-[Floating point numbers](https://en.wikipedia.org/wiki/IEEE_754).
-
-Types are equivalent to types of C:
+The equivalent types in ClickHouse and in C are given below:
 
 - `Float32` — `float`.
 - `Float64` — `double`.
 
-Aliases:
+Float types in ClickHouse have the following aliases:
 
 - `Float32` — `FLOAT`, `REAL`, `SINGLE`.
 - `Float64` — `DOUBLE`, `DOUBLE PRECISION`.
diff --git a/docs/ru/interfaces/http.md b/docs/ru/interfaces/http.md
index 5f11f1b430b..d9da51892f9 100644
--- a/docs/ru/interfaces/http.md
+++ b/docs/ru/interfaces/http.md
@@ -414,6 +414,8 @@ $ curl -v 'http://localhost:8123/predefined_query'
 
 - `content_type` — используется со всеми типами, возвращает [content-type](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Type).
 
+- `http_response_headers` — используется со всеми типами, чтобы добавить пользовательские заголовки в ответ. Может использоваться в том числе для задания заголовка `Content-Type` вместо `content_type`.
+
 - `response_content` — используется с типом`static`, содержимое ответа, отправленное клиенту, при использовании префикса ‘file://’ or ‘config://’, находит содержимое из файла или конфигурации, отправленного клиенту.
 
 Далее приведены методы настройки для различных типов.
@@ -509,6 +511,33 @@ max_final_threads	2
         <type>static</type>
         <status>402</status>
         <content_type>text/html; charset=UTF-8</content_type>
+        <http_response_headers>
+            <Content-Language>en</Content-Language>
+            <X-My-Custom-Header>43</X-My-Custom-Header>
+        </http_response_headers>
         <response_content>Say Hi!</response_content>
     </handler>
 </rule>
 </http_handlers>
 ```
+
+`http_response_headers` также может использоваться для определения `Content-Type` вместо `content_type`.
+
+``` xml
+<http_handlers>
+    <rule>
+        <methods>GET</methods>
+        <headers><XXX>xxx</XXX></headers>
+        <url>/hi</url>
+        <handler>
+            <type>static</type>
+            <status>402</status>
+            <http_response_headers>
+                <Content-Type>text/html; charset=UTF-8</Content-Type>
+                <Content-Language>en</Content-Language>
+                <X-My-Custom-Header>43</X-My-Custom-Header>
+            </http_response_headers>
+            <response_content>Say Hi!</response_content>
+        </handler>
+    </rule>
+</http_handlers>
+```
@@ -589,6 +618,9 @@ $ curl -v -H 'XXX:xxx' 'http://localhost:8123/get_config_static_handler' static text/html; charset=UTF-8 + + 737060cd8c284d8af7ad3082f209582d + file:///absolute_path_file.html @@ -599,6 +631,9 @@ $ curl -v -H 'XXX:xxx' 'http://localhost:8123/get_config_static_handler' static text/html; charset=UTF-8 + + 737060cd8c284d8af7ad3082f209582d + file://./relative_path_file.html diff --git a/src/Access/AuthenticationData.cpp b/src/Access/AuthenticationData.cpp index a32215f3d92..70355fadfbd 100644 --- a/src/Access/AuthenticationData.cpp +++ b/src/Access/AuthenticationData.cpp @@ -31,6 +31,7 @@ namespace DB { namespace ErrorCodes { + extern const int AUTHENTICATION_FAILED; extern const int SUPPORT_IS_DISABLED; extern const int BAD_ARGUMENTS; extern const int LOGICAL_ERROR; @@ -90,8 +91,10 @@ bool AuthenticationData::Util::checkPasswordBcrypt(std::string_view password [[m { #if USE_BCRYPT int ret = bcrypt_checkpw(password.data(), reinterpret_cast(password_bcrypt.data())); + /// Before 24.6 we didn't validate hashes on creation, so it could be that the stored hash is invalid + /// and it could not be decoded by the library if (ret == -1) - throw Exception(ErrorCodes::LOGICAL_ERROR, "BCrypt library failed: bcrypt_checkpw returned {}", ret); + throw Exception(ErrorCodes::AUTHENTICATION_FAILED, "Internal failure decoding Bcrypt hash"); return (ret == 0); #else throw Exception( @@ -230,6 +233,17 @@ void AuthenticationData::setPasswordHashBinary(const Digest & hash) throw Exception(ErrorCodes::BAD_ARGUMENTS, "Password hash for the 'BCRYPT_PASSWORD' authentication type has length {} " "but must be 59 or 60 bytes.", hash.size()); + + auto resized = hash; + resized.resize(64); + +#if USE_BCRYPT + /// Verify that it is a valid hash + int ret = bcrypt_checkpw("", reinterpret_cast(resized.data())); + if (ret == -1) + throw Exception(ErrorCodes::BAD_ARGUMENTS, "Could not decode the provided hash with 'bcrypt_hash'"); +#endif + password_hash = hash; password_hash.resize(64); return; diff --git a/src/AggregateFunctions/AggregateFunctionGroupConcat.cpp b/src/AggregateFunctions/AggregateFunctionGroupConcat.cpp new file mode 100644 index 00000000000..7541d64af4a --- /dev/null +++ b/src/AggregateFunctions/AggregateFunctionGroupConcat.cpp @@ -0,0 +1,265 @@ +#include +#include +#include + +#include +#include +#include + +#include +#include + +#include +#include +#include + +#include +#include +#include +#include + +#include +#include + + +namespace DB +{ +struct Settings; + +namespace ErrorCodes +{ + extern const int TOO_MANY_ARGUMENTS_FOR_FUNCTION; + extern const int ILLEGAL_TYPE_OF_ARGUMENT; + extern const int BAD_ARGUMENTS; +} + +namespace +{ + +struct GroupConcatDataBase +{ + UInt64 data_size = 0; + UInt64 allocated_size = 0; + char * data = nullptr; + + void checkAndUpdateSize(UInt64 add, Arena * arena) + { + if (data_size + add >= allocated_size) + { + auto old_size = allocated_size; + allocated_size = std::max(2 * allocated_size, data_size + add); + data = arena->realloc(data, old_size, allocated_size); + } + } + + void insertChar(const char * str, UInt64 str_size, Arena * arena) + { + checkAndUpdateSize(str_size, arena); + memcpy(data + data_size, str, str_size); + data_size += str_size; + } + +}; + +struct GroupConcatData : public GroupConcatDataBase +{ + using Offset = UInt64; + using Allocator = MixedAlignedArenaAllocator; + using Offsets = PODArray; + + /// offset[i * 2] - beginning of the i-th row, offset[i * 2 + 1] - end of the i-th row + Offsets offsets; + UInt64 num_rows = 0; + + 
UInt64 getSize(size_t i) const { return offsets[i * 2 + 1] - offsets[i * 2]; } + + UInt64 getString(size_t i) const { return offsets[i * 2]; } + + void insert(const IColumn * column, const SerializationPtr & serialization, size_t row_num, Arena * arena) + { + WriteBufferFromOwnString buff; + serialization->serializeText(*column, row_num, buff, {}); + auto string = buff.stringView(); + + checkAndUpdateSize(string.size(), arena); + memcpy(data + data_size, string.data(), string.size()); + offsets.push_back(data_size, arena); + data_size += string.size(); + offsets.push_back(data_size, arena); + num_rows++; + } +}; + +template +class GroupConcatImpl final + : public IAggregateFunctionDataHelper> +{ + static constexpr auto name = "groupConcat"; + + SerializationPtr serialization; + UInt64 limit; + const String delimiter; + +public: + GroupConcatImpl(const DataTypePtr & data_type_, const Array & parameters_, UInt64 limit_, const String & delimiter_) + : IAggregateFunctionDataHelper>( + {data_type_}, parameters_, std::make_shared()) + , serialization(this->argument_types[0]->getDefaultSerialization()) + , limit(limit_) + , delimiter(delimiter_) + { + } + + String getName() const override { return name; } + + void add(AggregateDataPtr __restrict place, const IColumn ** columns, size_t row_num, Arena * arena) const override + { + auto & cur_data = this->data(place); + + if constexpr (has_limit) + if (cur_data.num_rows >= limit) + return; + + if (cur_data.data_size != 0) + cur_data.insertChar(delimiter.c_str(), delimiter.size(), arena); + + cur_data.insert(columns[0], serialization, row_num, arena); + } + + void merge(AggregateDataPtr __restrict place, ConstAggregateDataPtr rhs, Arena * arena) const override + { + auto & cur_data = this->data(place); + auto & rhs_data = this->data(rhs); + + if (rhs_data.data_size == 0) + return; + + if constexpr (has_limit) + { + UInt64 new_elems_count = std::min(rhs_data.num_rows, limit - cur_data.num_rows); + for (UInt64 i = 0; i < new_elems_count; ++i) + { + if (cur_data.data_size != 0) + cur_data.insertChar(delimiter.c_str(), delimiter.size(), arena); + + cur_data.offsets.push_back(cur_data.data_size, arena); + cur_data.insertChar(rhs_data.data + rhs_data.getString(i), rhs_data.getSize(i), arena); + cur_data.num_rows++; + cur_data.offsets.push_back(cur_data.data_size, arena); + } + } + else + { + if (cur_data.data_size != 0) + cur_data.insertChar(delimiter.c_str(), delimiter.size(), arena); + + cur_data.insertChar(rhs_data.data, rhs_data.data_size, arena); + } + } + + void serialize(ConstAggregateDataPtr __restrict place, WriteBuffer & buf, std::optional /* version */) const override + { + auto & cur_data = this->data(place); + + writeVarUInt(cur_data.data_size, buf); + writeVarUInt(cur_data.allocated_size, buf); + + buf.write(cur_data.data, cur_data.data_size); + + if constexpr (has_limit) + { + writeVarUInt(cur_data.num_rows, buf); + for (const auto & offset : cur_data.offsets) + writeVarUInt(offset, buf); + } + } + + void deserialize(AggregateDataPtr __restrict place, ReadBuffer & buf, std::optional /* version */, Arena * arena) const override + { + auto & cur_data = this->data(place); + + readVarUInt(cur_data.data_size, buf); + readVarUInt(cur_data.allocated_size, buf); + + buf.readStrict(cur_data.data, cur_data.data_size); + + if constexpr (has_limit) + { + readVarUInt(cur_data.num_rows, buf); + cur_data.offsets.resize_exact(cur_data.num_rows * 2, arena); + for (auto & offset : cur_data.offsets) + readVarUInt(offset, buf); + } + } + + void 
insertResultInto(AggregateDataPtr __restrict place, IColumn & to, Arena *) const override + { + auto & cur_data = this->data(place); + + if (cur_data.data_size == 0) + { + auto column_nullable = IColumn::mutate(makeNullable(to.getPtr())); + column_nullable->insertDefault(); + return; + } + + auto & column_string = assert_cast(to); + column_string.insertData(cur_data.data, cur_data.data_size); + } + + bool allocatesMemoryInArena() const override { return true; } +}; + +AggregateFunctionPtr createAggregateFunctionGroupConcat( + const std::string & name, const DataTypes & argument_types, const Array & parameters, const Settings *) +{ + assertUnary(name, argument_types); + + bool has_limit = false; + UInt64 limit = 0; + String delimiter; + + if (parameters.size() > 2) + throw Exception(ErrorCodes::TOO_MANY_ARGUMENTS_FOR_FUNCTION, + "Incorrect number of parameters for aggregate function {}, should be 0, 1 or 2, got: {}", name, parameters.size()); + + if (!parameters.empty()) + { + auto type = parameters[0].getType(); + if (type != Field::Types::String) + throw Exception(ErrorCodes::ILLEGAL_TYPE_OF_ARGUMENT, "First parameter for aggregate function {} should be string", name); + + delimiter = parameters[0].get(); + } + if (parameters.size() == 2) + { + auto type = parameters[1].getType(); + + if (type != Field::Types::Int64 && type != Field::Types::UInt64) + throw Exception(ErrorCodes::BAD_ARGUMENTS, "Second parameter for aggregate function {} should be a positive number", name); + + if ((type == Field::Types::Int64 && parameters[1].get() <= 0) || + (type == Field::Types::UInt64 && parameters[1].get() == 0)) + throw Exception(ErrorCodes::BAD_ARGUMENTS, "Second parameter for aggregate function {} should be a positive number, got: {}", name, parameters[1].get()); + + has_limit = true; + limit = parameters[1].get(); + } + + if (has_limit) + return std::make_shared>(argument_types[0], parameters, limit, delimiter); + else + return std::make_shared>(argument_types[0], parameters, limit, delimiter); +} + +} + +void registerAggregateFunctionGroupConcat(AggregateFunctionFactory & factory) +{ + AggregateFunctionProperties properties = { .returns_default_when_only_null = false, .is_order_dependent = true }; + + factory.registerFunction("groupConcat", { createAggregateFunctionGroupConcat, properties }); + factory.registerAlias("group_concat", "groupConcat", AggregateFunctionFactory::CaseInsensitive); +} + +} diff --git a/src/AggregateFunctions/registerAggregateFunctions.cpp b/src/AggregateFunctions/registerAggregateFunctions.cpp index 58e657d3723..4ac25e14ee6 100644 --- a/src/AggregateFunctions/registerAggregateFunctions.cpp +++ b/src/AggregateFunctions/registerAggregateFunctions.cpp @@ -19,6 +19,7 @@ void registerAggregateFunctionGroupArraySorted(AggregateFunctionFactory & factor void registerAggregateFunctionGroupUniqArray(AggregateFunctionFactory &); void registerAggregateFunctionGroupArrayInsertAt(AggregateFunctionFactory &); void registerAggregateFunctionGroupArrayIntersect(AggregateFunctionFactory &); +void registerAggregateFunctionGroupConcat(AggregateFunctionFactory &); void registerAggregateFunctionsQuantile(AggregateFunctionFactory &); void registerAggregateFunctionsQuantileDeterministic(AggregateFunctionFactory &); void registerAggregateFunctionsQuantileExact(AggregateFunctionFactory &); @@ -120,6 +121,7 @@ void registerAggregateFunctions() registerAggregateFunctionGroupUniqArray(factory); registerAggregateFunctionGroupArrayInsertAt(factory); registerAggregateFunctionGroupArrayIntersect(factory); 
+ registerAggregateFunctionGroupConcat(factory); registerAggregateFunctionsQuantile(factory); registerAggregateFunctionsQuantileDeterministic(factory); registerAggregateFunctionsQuantileExact(factory); diff --git a/src/Analyzer/Resolve/QueryAnalyzer.cpp b/src/Analyzer/Resolve/QueryAnalyzer.cpp index 5e5ecaaa93a..8860050c5b9 100644 --- a/src/Analyzer/Resolve/QueryAnalyzer.cpp +++ b/src/Analyzer/Resolve/QueryAnalyzer.cpp @@ -985,18 +985,18 @@ std::string QueryAnalyzer::rewriteAggregateFunctionNameIfNeeded( { result_aggregate_function_name = settings.count_distinct_implementation; } - else if (aggregate_function_name_lowercase == "countdistinctif" || aggregate_function_name_lowercase == "countifdistinct") + else if (aggregate_function_name_lowercase == "countifdistinct" || + (settings.rewrite_count_distinct_if_with_count_distinct_implementation && aggregate_function_name_lowercase == "countdistinctif")) { result_aggregate_function_name = settings.count_distinct_implementation; result_aggregate_function_name += "If"; } - - /// Replace aggregateFunctionIfDistinct into aggregateFunctionDistinctIf to make execution more optimal - if (result_aggregate_function_name.ends_with("ifdistinct")) + else if (aggregate_function_name_lowercase.ends_with("ifdistinct")) { + /// Replace aggregateFunctionIfDistinct into aggregateFunctionDistinctIf to make execution more optimal size_t prefix_length = result_aggregate_function_name.size() - strlen("ifdistinct"); result_aggregate_function_name = result_aggregate_function_name.substr(0, prefix_length) + "DistinctIf"; - } + } bool need_add_or_null = settings.aggregate_functions_null_for_empty && !result_aggregate_function_name.ends_with("OrNull"); if (need_add_or_null) diff --git a/src/Common/AsynchronousMetrics.cpp b/src/Common/AsynchronousMetrics.cpp index 4c71b9846c7..6309f6079f6 100644 --- a/src/Common/AsynchronousMetrics.cpp +++ b/src/Common/AsynchronousMetrics.cpp @@ -415,6 +415,15 @@ Value saveAllArenasMetric(AsynchronousMetricValues & values, fmt::format("jemalloc.arenas.all.{}", metric_name)); } +template +Value saveJemallocProf(AsynchronousMetricValues & values, + const std::string & metric_name) +{ + return saveJemallocMetricImpl(values, + fmt::format("prof.{}", metric_name), + fmt::format("jemalloc.prof.{}", metric_name)); +} + } #endif @@ -607,6 +616,7 @@ void AsynchronousMetrics::update(TimePoint update_time, bool force_update) saveJemallocMetric(new_values, "background_thread.num_threads"); saveJemallocMetric(new_values, "background_thread.num_runs"); saveJemallocMetric(new_values, "background_thread.run_intervals"); + saveJemallocProf(new_values, "active"); saveAllArenasMetric(new_values, "pactive"); [[maybe_unused]] size_t je_malloc_pdirty = saveAllArenasMetric(new_values, "pdirty"); [[maybe_unused]] size_t je_malloc_pmuzzy = saveAllArenasMetric(new_values, "pmuzzy"); diff --git a/src/Common/StatusFile.cpp b/src/Common/StatusFile.cpp index ba7595ae6d7..80464f38082 100644 --- a/src/Common/StatusFile.cpp +++ b/src/Common/StatusFile.cpp @@ -85,9 +85,18 @@ StatusFile::StatusFile(std::string path_, FillFunction fill_) /// Write information about current server instance to the file. WriteBufferFromFileDescriptor out(fd, 1024); - fill(out); - /// Finalize here to avoid throwing exceptions in destructor. - out.finalize(); + try + { + fill(out); + /// Finalize here to avoid throwing exceptions in destructor. + out.finalize(); + } + catch (...) 
+ { + /// Finalize in case of exception to avoid throwing exceptions in destructor + out.finalize(); + throw; + } } catch (...) { diff --git a/src/Coordination/KeeperStorage.cpp b/src/Coordination/KeeperStorage.cpp index 9bcd0608bf7..9251624346b 100644 --- a/src/Coordination/KeeperStorage.cpp +++ b/src/Coordination/KeeperStorage.cpp @@ -609,7 +609,10 @@ void KeeperStorage::UncommittedState::commit(int64_t commit_zxid) uncommitted_auth.pop_front(); if (uncommitted_auth.empty()) session_and_auth.erase(add_auth->session_id); - + } + else if (auto * close_session = std::get_if(&front_delta.operation)) + { + closed_sessions.erase(close_session->session_id); } deltas.pop_front(); @@ -682,6 +685,10 @@ void KeeperStorage::UncommittedState::rollback(int64_t rollback_zxid) session_and_auth.erase(add_auth->session_id); } } + else if (auto * close_session = std::get_if(&delta_it->operation)) + { + closed_sessions.erase(close_session->session_id); + } } if (delta_it == deltas.rend()) @@ -878,6 +885,10 @@ Coordination::Error KeeperStorage::commit(int64_t commit_zxid) session_and_auth[operation.session_id].emplace_back(std::move(operation.auth_id)); return Coordination::Error::ZOK; } + else if constexpr (std::same_as) + { + return Coordination::Error::ZOK; + } else { // shouldn't be called in any process functions @@ -2366,12 +2377,15 @@ void KeeperStorage::preprocessRequest( ephemerals.erase(session_ephemerals); } + new_deltas.emplace_back(transaction.zxid, CloseSessionDelta{session_id}); + uncommitted_state.closed_sessions.insert(session_id); new_digest = calculateNodesDigest(new_digest, new_deltas); return; } - if (check_acl && !request_processor->checkAuth(*this, session_id, false)) + if ((check_acl && !request_processor->checkAuth(*this, session_id, false)) || + uncommitted_state.closed_sessions.contains(session_id)) // Is session closed but not committed yet { uncommitted_state.deltas.emplace_back(new_last_zxid, Coordination::Error::ZNOAUTH); return; diff --git a/src/Coordination/KeeperStorage.h b/src/Coordination/KeeperStorage.h index d9e67f799f8..d5e9a64e69c 100644 --- a/src/Coordination/KeeperStorage.h +++ b/src/Coordination/KeeperStorage.h @@ -314,8 +314,13 @@ public: AuthID auth_id; }; + struct CloseSessionDelta + { + int64_t session_id; + }; + using Operation = std:: - variant; + variant; struct Delta { @@ -351,6 +356,7 @@ public: std::shared_ptr tryGetNodeFromStorage(StringRef path) const; std::unordered_map> session_and_auth; + std::unordered_set closed_sessions; struct UncommittedNode { diff --git a/src/Coordination/tests/gtest_coordination.cpp b/src/Coordination/tests/gtest_coordination.cpp index d314757efc9..653b1fa0a84 100644 --- a/src/Coordination/tests/gtest_coordination.cpp +++ b/src/Coordination/tests/gtest_coordination.cpp @@ -2019,6 +2019,67 @@ TEST_P(CoordinationTest, TestCreateNodeWithAuthSchemeForAclWhenAuthIsPrecommitte EXPECT_EQ(acls[0].permissions, 31); } +TEST_P(CoordinationTest, TestPreprocessWhenCloseSessionIsPrecommitted) +{ + using namespace Coordination; + using namespace DB; + + ChangelogDirTest snapshots("./snapshots"); + setSnapshotDirectory("./snapshots"); + ResponsesQueue queue(std::numeric_limits::max()); + SnapshotsQueue snapshots_queue{1}; + int64_t session_id = 1; + size_t term = 0; + + auto state_machine = std::make_shared(queue, snapshots_queue, keeper_context, nullptr); + state_machine->init(); + + auto & storage = state_machine->getStorageUnsafe(); + const auto & uncommitted_state = storage.uncommitted_state; + + // Create first node for the session + 
String node_path_1 = "/node_1"; + std::shared_ptr create_req_1 = std::make_shared(); + create_req_1->path = node_path_1; + auto create_entry_1 = getLogEntryFromZKRequest(term, session_id, state_machine->getNextZxid(), create_req_1); + + state_machine->pre_commit(1, create_entry_1->get_buf()); + EXPECT_TRUE(uncommitted_state.nodes.contains(node_path_1)); + + state_machine->commit(1, create_entry_1->get_buf()); + EXPECT_TRUE(storage.container.contains(node_path_1)); + + // Close session + std::shared_ptr close_req = std::make_shared(); + auto close_entry = getLogEntryFromZKRequest(term, session_id, state_machine->getNextZxid(), close_req); + // Pre-commit close session + state_machine->pre_commit(2, close_entry->get_buf()); + + // Try to create second node after close session is pre-committed + String node_path_2 = "/node_2"; + std::shared_ptr create_req_2 = std::make_shared(); + create_req_2->path = node_path_2; + auto create_entry_2 = getLogEntryFromZKRequest(term, session_id, state_machine->getNextZxid(), create_req_2); + + // Pre-commit creating second node + state_machine->pre_commit(3, create_entry_2->get_buf()); + // Second node wasn't created + EXPECT_FALSE(uncommitted_state.nodes.contains(node_path_2)); + + // Rollback pre-committed closing session + state_machine->rollback(3, create_entry_2->get_buf()); + state_machine->rollback(2, close_entry->get_buf()); + + // Pre-commit creating second node + state_machine->pre_commit(2, create_entry_2->get_buf()); + // Now second node was created + EXPECT_TRUE(uncommitted_state.nodes.contains(node_path_2)); + + state_machine->commit(2, create_entry_2->get_buf()); + EXPECT_TRUE(storage.container.contains(node_path_1)); + EXPECT_TRUE(storage.container.contains(node_path_2)); +} + TEST_P(CoordinationTest, TestSetACLWithAuthSchemeForAclWhenAuthIsPrecommitted) { using namespace Coordination; diff --git a/src/Core/Settings.h b/src/Core/Settings.h index bda403b1b40..6197a7cf6e1 100644 --- a/src/Core/Settings.h +++ b/src/Core/Settings.h @@ -1144,7 +1144,9 @@ class IColumn; M(UInt64, output_format_pretty_max_value_width, 10000, "Maximum width of value to display in Pretty formats. If greater - it will be cut.", 0) \ M(UInt64, output_format_pretty_max_value_width_apply_for_single_value, false, "Only cut values (see the `output_format_pretty_max_value_width` setting) when it is not a single value in a block. Otherwise output it entirely, which is useful for the `SHOW CREATE TABLE` query.", 0) \ M(UInt64Auto, output_format_pretty_color, "auto", "Use ANSI escape sequences in Pretty formats. 0 - disabled, 1 - enabled, 'auto' - enabled if a terminal.", 0) \ - M(String, output_format_pretty_grid_charset, "UTF-8", "Charset for printing grid borders. Available charsets: ASCII, UTF-8 (default one).", 0) \ + M(String, output_format_pretty_grid_charset, "UTF-8", "Charset for printing grid borders. Available charsets: ASCII, UTF-8 (default one).", 0) \ + M(UInt64, output_format_pretty_display_footer_column_names, true, "Display column names in the footer if there are 999 or more rows.", 0) \ + M(UInt64, output_format_pretty_display_footer_column_names_min_rows, 50, "Sets the minimum threshold value of rows for which to enable displaying column names in the footer. 
50 (default)", 0) \ M(UInt64, output_format_parquet_row_group_size, 1000000, "Target row group size in rows.", 0) \ M(UInt64, output_format_parquet_row_group_size_bytes, 512 * 1024 * 1024, "Target row group size in bytes, before compression.", 0) \ M(Bool, output_format_parquet_string_as_string, true, "Use Parquet String type instead of Binary for String columns.", 0) \ diff --git a/src/Core/SettingsChangesHistory.h b/src/Core/SettingsChangesHistory.h index c04f7a498af..eddf83f7912 100644 --- a/src/Core/SettingsChangesHistory.h +++ b/src/Core/SettingsChangesHistory.h @@ -108,6 +108,8 @@ static const std::mapvariant || result_column->empty()) + if (!variant_element_state->variant || mutable_column->empty()) { variant_element_state->variant = mutable_column->cloneEmpty(); diff --git a/src/Dictionaries/DirectDictionary.cpp b/src/Dictionaries/DirectDictionary.cpp index 933ffa04069..2c0f7653aff 100644 --- a/src/Dictionaries/DirectDictionary.cpp +++ b/src/Dictionaries/DirectDictionary.cpp @@ -175,8 +175,7 @@ Columns DirectDictionary::getColumns( if (!mask_filled) (*default_mask)[requested_key_index] = 1; - Field value{}; - result_column->insert(value); + result_column->insertDefault(); } else { diff --git a/src/Formats/FormatFactory.cpp b/src/Formats/FormatFactory.cpp index e9a405aa796..aa9600875db 100644 --- a/src/Formats/FormatFactory.cpp +++ b/src/Formats/FormatFactory.cpp @@ -181,6 +181,8 @@ FormatSettings getFormatSettings(const ContextPtr & context, const Settings & se format_settings.pretty.highlight_digit_groups = settings.output_format_pretty_highlight_digit_groups; format_settings.pretty.output_format_pretty_row_numbers = settings.output_format_pretty_row_numbers; format_settings.pretty.output_format_pretty_single_large_number_tip_threshold = settings.output_format_pretty_single_large_number_tip_threshold; + format_settings.pretty.output_format_pretty_display_footer_column_names = settings.output_format_pretty_display_footer_column_names; + format_settings.pretty.output_format_pretty_display_footer_column_names_min_rows = settings.output_format_pretty_display_footer_column_names_min_rows; format_settings.protobuf.input_flatten_google_wrappers = settings.input_format_protobuf_flatten_google_wrappers; format_settings.protobuf.output_nullables_with_google_wrappers = settings.output_format_protobuf_nullables_with_google_wrappers; format_settings.protobuf.skip_fields_with_unsupported_types_in_schema_inference = settings.input_format_protobuf_skip_fields_with_unsupported_types_in_schema_inference; diff --git a/src/Formats/FormatSettings.h b/src/Formats/FormatSettings.h index 421ed4d112d..18e7df8f24e 100644 --- a/src/Formats/FormatSettings.h +++ b/src/Formats/FormatSettings.h @@ -289,6 +289,8 @@ struct FormatSettings bool output_format_pretty_row_numbers = false; UInt64 output_format_pretty_single_large_number_tip_threshold = 1'000'000; + UInt64 output_format_pretty_display_footer_column_names = 1; + UInt64 output_format_pretty_display_footer_column_names_min_rows = 50; enum class Charset : uint8_t { diff --git a/src/IO/WriteBufferFromFile.cpp b/src/IO/WriteBufferFromFile.cpp index 0ca6c26f08c..d641e553671 100644 --- a/src/IO/WriteBufferFromFile.cpp +++ b/src/IO/WriteBufferFromFile.cpp @@ -77,7 +77,15 @@ WriteBufferFromFile::~WriteBufferFromFile() if (fd < 0) return; - finalize(); + try + { + finalize(); + } + catch (...) 
+ { + tryLogCurrentException(__PRETTY_FUNCTION__); + } + int err = ::close(fd); /// Everything except for EBADF should be ignored in dtor, since all of /// others (EINTR/EIO/ENOSPC/EDQUOT) could be possible during writing to diff --git a/src/IO/WriteBufferFromFileDescriptor.cpp b/src/IO/WriteBufferFromFileDescriptor.cpp index 813ef0deab9..a758f99458d 100644 --- a/src/IO/WriteBufferFromFileDescriptor.cpp +++ b/src/IO/WriteBufferFromFileDescriptor.cpp @@ -105,7 +105,14 @@ WriteBufferFromFileDescriptor::WriteBufferFromFileDescriptor( WriteBufferFromFileDescriptor::~WriteBufferFromFileDescriptor() { - finalize(); + try + { + finalize(); + } + catch (...) + { + tryLogCurrentException(__PRETTY_FUNCTION__); + } } void WriteBufferFromFileDescriptor::finalizeImpl() diff --git a/src/Processors/Formats/Impl/PrettyBlockOutputFormat.cpp b/src/Processors/Formats/Impl/PrettyBlockOutputFormat.cpp index b1dbe68579f..ef3ef18e88d 100644 --- a/src/Processors/Formats/Impl/PrettyBlockOutputFormat.cpp +++ b/src/Processors/Formats/Impl/PrettyBlockOutputFormat.cpp @@ -116,6 +116,12 @@ struct GridSymbols const char * dash = "─"; const char * bold_bar = "┃"; const char * bar = "│"; + const char * bold_right_separator_footer = "┫"; + const char * bold_left_separator_footer = "┣"; + const char * bold_middle_separator_footer = "╋"; + const char * bold_left_bottom_corner = "┗"; + const char * bold_right_bottom_corner = "┛"; + const char * bold_bottom_separator = "┻"; }; GridSymbols utf8_grid_symbols; @@ -182,47 +188,58 @@ void PrettyBlockOutputFormat::writeChunk(const Chunk & chunk, PortKind port_kind Widths name_widths; calculateWidths(header, chunk, widths, max_widths, name_widths); - const GridSymbols & grid_symbols = format_settings.pretty.charset == FormatSettings::Pretty::Charset::UTF8 ? - utf8_grid_symbols : - ascii_grid_symbols; + const GridSymbols & grid_symbols + = format_settings.pretty.charset == FormatSettings::Pretty::Charset::UTF8 ? 
utf8_grid_symbols : ascii_grid_symbols; /// Create separators WriteBufferFromOwnString top_separator; WriteBufferFromOwnString middle_names_separator; WriteBufferFromOwnString middle_values_separator; WriteBufferFromOwnString bottom_separator; + WriteBufferFromOwnString footer_top_separator; + WriteBufferFromOwnString footer_bottom_separator; - top_separator << grid_symbols.bold_left_top_corner; - middle_names_separator << grid_symbols.bold_left_separator; + top_separator << grid_symbols.bold_left_top_corner; + middle_names_separator << grid_symbols.bold_left_separator; middle_values_separator << grid_symbols.left_separator; - bottom_separator << grid_symbols.left_bottom_corner; + bottom_separator << grid_symbols.left_bottom_corner; + footer_top_separator << grid_symbols.bold_left_separator_footer; + footer_bottom_separator << grid_symbols.bold_left_bottom_corner; for (size_t i = 0; i < num_columns; ++i) { if (i != 0) { - top_separator << grid_symbols.bold_top_separator; - middle_names_separator << grid_symbols.bold_middle_separator; + top_separator << grid_symbols.bold_top_separator; + middle_names_separator << grid_symbols.bold_middle_separator; middle_values_separator << grid_symbols.middle_separator; - bottom_separator << grid_symbols.bottom_separator; + bottom_separator << grid_symbols.bottom_separator; + footer_top_separator << grid_symbols.bold_middle_separator_footer; + footer_bottom_separator << grid_symbols.bold_bottom_separator; } for (size_t j = 0; j < max_widths[i] + 2; ++j) { - top_separator << grid_symbols.bold_dash; - middle_names_separator << grid_symbols.bold_dash; + top_separator << grid_symbols.bold_dash; + middle_names_separator << grid_symbols.bold_dash; middle_values_separator << grid_symbols.dash; - bottom_separator << grid_symbols.dash; + bottom_separator << grid_symbols.dash; + footer_top_separator << grid_symbols.bold_dash; + footer_bottom_separator << grid_symbols.bold_dash; } } - top_separator << grid_symbols.bold_right_top_corner << "\n"; - middle_names_separator << grid_symbols.bold_right_separator << "\n"; + top_separator << grid_symbols.bold_right_top_corner << "\n"; + middle_names_separator << grid_symbols.bold_right_separator << "\n"; middle_values_separator << grid_symbols.right_separator << "\n"; - bottom_separator << grid_symbols.right_bottom_corner << "\n"; + bottom_separator << grid_symbols.right_bottom_corner << "\n"; + footer_top_separator << grid_symbols.bold_right_separator_footer << "\n"; + footer_bottom_separator << grid_symbols.bold_right_bottom_corner << "\n"; std::string top_separator_s = top_separator.str(); std::string middle_names_separator_s = middle_names_separator.str(); std::string middle_values_separator_s = middle_values_separator.str(); std::string bottom_separator_s = bottom_separator.str(); + std::string footer_top_separator_s = footer_top_separator.str(); + std::string footer_bottom_separator_s = footer_bottom_separator.str(); if (format_settings.pretty.output_format_pretty_row_numbers) { @@ -239,43 +256,47 @@ void PrettyBlockOutputFormat::writeChunk(const Chunk & chunk, PortKind port_kind } /// Names - writeCString(grid_symbols.bold_bar, out); - writeCString(" ", out); - for (size_t i = 0; i < num_columns; ++i) + auto write_names = [&]() -> void { - if (i != 0) + writeCString(grid_symbols.bold_bar, out); + writeCString(" ", out); + for (size_t i = 0; i < num_columns; ++i) { - writeCString(" ", out); - writeCString(grid_symbols.bold_bar, out); - writeCString(" ", out); + if (i != 0) + { + writeCString(" ", out); + 
writeCString(grid_symbols.bold_bar, out); + writeCString(" ", out); + } + + const auto & col = header.getByPosition(i); + + if (color) + writeCString("\033[1m", out); + + if (col.type->shouldAlignRightInPrettyFormats()) + { + for (size_t k = 0; k < max_widths[i] - name_widths[i]; ++k) + writeChar(' ', out); + + writeString(col.name, out); + } + else + { + writeString(col.name, out); + + for (size_t k = 0; k < max_widths[i] - name_widths[i]; ++k) + writeChar(' ', out); + } + + if (color) + writeCString("\033[0m", out); } - - const auto & col = header.getByPosition(i); - - if (color) - writeCString("\033[1m", out); - - if (col.type->shouldAlignRightInPrettyFormats()) - { - for (size_t k = 0; k < max_widths[i] - name_widths[i]; ++k) - writeChar(' ', out); - - writeString(col.name, out); - } - else - { - writeString(col.name, out); - - for (size_t k = 0; k < max_widths[i] - name_widths[i]; ++k) - writeChar(' ', out); - } - - if (color) - writeCString("\033[0m", out); - } - writeCString(" ", out); - writeCString(grid_symbols.bold_bar, out); - writeCString("\n", out); + writeCString(" ", out); + writeCString(grid_symbols.bold_bar, out); + writeCString("\n", out); + }; + write_names(); if (format_settings.pretty.output_format_pretty_row_numbers) { @@ -317,9 +338,15 @@ void PrettyBlockOutputFormat::writeChunk(const Chunk & chunk, PortKind port_kind if (j != 0) writeCString(grid_symbols.bar, out); const auto & type = *header.getByPosition(j).type; - writeValueWithPadding(*columns[j], *serializations[j], i, + writeValueWithPadding( + *columns[j], + *serializations[j], + i, widths[j].empty() ? max_widths[j] : widths[j][i], - max_widths[j], cut_to_width, type.shouldAlignRightInPrettyFormats(), isNumber(type)); + max_widths[j], + cut_to_width, + type.shouldAlignRightInPrettyFormats(), + isNumber(type)); } writeCString(grid_symbols.bar, out); @@ -332,8 +359,33 @@ void PrettyBlockOutputFormat::writeChunk(const Chunk & chunk, PortKind port_kind /// Write left blank writeString(String(row_number_width, ' '), out); } - writeString(bottom_separator_s, out); + /// output column names in the footer + if ((num_rows >= format_settings.pretty.output_format_pretty_display_footer_column_names_min_rows) && format_settings.pretty.output_format_pretty_display_footer_column_names) + { + writeString(footer_top_separator_s, out); + + if (format_settings.pretty.output_format_pretty_row_numbers) + { + /// Write left blank + writeString(String(row_number_width, ' '), out); + } + + /// output header names + write_names(); + + if (format_settings.pretty.output_format_pretty_row_numbers) + { + /// Write left blank + writeString(String(row_number_width, ' '), out); + } + + writeString(footer_bottom_separator_s, out); + } + else + { + writeString(bottom_separator_s, out); + } total_rows += num_rows; } diff --git a/src/Processors/Formats/Impl/PrettyCompactBlockOutputFormat.cpp b/src/Processors/Formats/Impl/PrettyCompactBlockOutputFormat.cpp index e1cbf69dbf0..57ec23e7e3b 100644 --- a/src/Processors/Formats/Impl/PrettyCompactBlockOutputFormat.cpp +++ b/src/Processors/Formats/Impl/PrettyCompactBlockOutputFormat.cpp @@ -57,7 +57,8 @@ PrettyCompactBlockOutputFormat::PrettyCompactBlockOutputFormat(WriteBuffer & out void PrettyCompactBlockOutputFormat::writeHeader( const Block & block, const Widths & max_widths, - const Widths & name_widths) + const Widths & name_widths, + const bool write_footer) { if (format_settings.pretty.output_format_pretty_row_numbers) { @@ -70,14 +71,20 @@ void PrettyCompactBlockOutputFormat::writeHeader( 
ascii_grid_symbols; /// Names - writeCString(grid_symbols.left_top_corner, out); + if (write_footer) + writeCString(grid_symbols.left_bottom_corner, out); + else + writeCString(grid_symbols.left_top_corner, out); writeCString(grid_symbols.dash, out); for (size_t i = 0; i < max_widths.size(); ++i) { if (i != 0) { writeCString(grid_symbols.dash, out); - writeCString(grid_symbols.top_separator, out); + if (write_footer) + writeCString(grid_symbols.bottom_separator, out); + else + writeCString(grid_symbols.top_separator, out); writeCString(grid_symbols.dash, out); } @@ -107,7 +114,10 @@ void PrettyCompactBlockOutputFormat::writeHeader( } } writeCString(grid_symbols.dash, out); - writeCString(grid_symbols.right_top_corner, out); + if (write_footer) + writeCString(grid_symbols.right_bottom_corner, out); + else + writeCString(grid_symbols.right_top_corner, out); writeCString("\n", out); } @@ -195,13 +205,19 @@ void PrettyCompactBlockOutputFormat::writeChunk(const Chunk & chunk, PortKind po Widths name_widths; calculateWidths(header, chunk, widths, max_widths, name_widths); - writeHeader(header, max_widths, name_widths); + writeHeader(header, max_widths, name_widths, false); for (size_t i = 0; i < num_rows && total_rows + i < max_rows; ++i) writeRow(i, header, chunk, widths, max_widths); - - writeBottom(max_widths); + if ((num_rows >= format_settings.pretty.output_format_pretty_display_footer_column_names_min_rows) && format_settings.pretty.output_format_pretty_display_footer_column_names) + { + writeHeader(header, max_widths, name_widths, true); + } + else + { + writeBottom(max_widths); + } total_rows += num_rows; } diff --git a/src/Processors/Formats/Impl/PrettyCompactBlockOutputFormat.h b/src/Processors/Formats/Impl/PrettyCompactBlockOutputFormat.h index 911fc2e950c..b0b7c2ad8f4 100644 --- a/src/Processors/Formats/Impl/PrettyCompactBlockOutputFormat.h +++ b/src/Processors/Formats/Impl/PrettyCompactBlockOutputFormat.h @@ -17,7 +17,7 @@ public: String getName() const override { return "PrettyCompactBlockOutputFormat"; } private: - void writeHeader(const Block & block, const Widths & max_widths, const Widths & name_widths); + void writeHeader(const Block & block, const Widths & max_widths, const Widths & name_widths, bool write_footer); void writeBottom(const Widths & max_widths); void writeRow( size_t row_num, diff --git a/src/Processors/Formats/Impl/PrettySpaceBlockOutputFormat.cpp b/src/Processors/Formats/Impl/PrettySpaceBlockOutputFormat.cpp index 3f224f034aa..0a594b54b12 100644 --- a/src/Processors/Formats/Impl/PrettySpaceBlockOutputFormat.cpp +++ b/src/Processors/Formats/Impl/PrettySpaceBlockOutputFormat.cpp @@ -36,39 +36,46 @@ void PrettySpaceBlockOutputFormat::writeChunk(const Chunk & chunk, PortKind port if (format_settings.pretty.output_format_pretty_row_numbers) writeString(String(row_number_width, ' '), out); /// Names - for (size_t i = 0; i < num_columns; ++i) + auto write_names = [&](const bool is_footer) -> void { - if (i != 0) - writeCString(" ", out); - else - writeChar(' ', out); - - const ColumnWithTypeAndName & col = header.getByPosition(i); - - if (col.type->shouldAlignRightInPrettyFormats()) + for (size_t i = 0; i < num_columns; ++i) { - for (ssize_t k = 0; k < std::max(0z, static_cast(max_widths[i] - name_widths[i])); ++k) + if (i != 0) + writeCString(" ", out); + else writeChar(' ', out); - if (color) - writeCString("\033[1m", out); - writeString(col.name, out); - if (color) - writeCString("\033[0m", out); - } - else - { - if (color) - writeCString("\033[1m", out); - 
writeString(col.name, out); - if (color) - writeCString("\033[0m", out); + const ColumnWithTypeAndName & col = header.getByPosition(i); - for (ssize_t k = 0; k < std::max(0z, static_cast(max_widths[i] - name_widths[i])); ++k) - writeChar(' ', out); + if (col.type->shouldAlignRightInPrettyFormats()) + { + for (ssize_t k = 0; k < std::max(0z, static_cast(max_widths[i] - name_widths[i])); ++k) + writeChar(' ', out); + + if (color) + writeCString("\033[1m", out); + writeString(col.name, out); + if (color) + writeCString("\033[0m", out); + } + else + { + if (color) + writeCString("\033[1m", out); + writeString(col.name, out); + if (color) + writeCString("\033[0m", out); + + for (ssize_t k = 0; k < std::max(0z, static_cast(max_widths[i] - name_widths[i])); ++k) + writeChar(' ', out); + } } - } - writeCString("\n\n", out); + if (!is_footer) + writeCString("\n\n", out); + else + writeCString("\n", out); + }; + write_names(false); for (size_t row = 0; row < num_rows && total_rows + row < max_rows; ++row) { @@ -95,11 +102,19 @@ void PrettySpaceBlockOutputFormat::writeChunk(const Chunk & chunk, PortKind port writeValueWithPadding( *columns[column], *serializations[column], row, cur_width, max_widths[column], cut_to_width, type.shouldAlignRightInPrettyFormats(), isNumber(type)); } - writeReadableNumberTip(chunk); writeChar('\n', out); } + /// Write blank line between last row and footer + if ((num_rows >= format_settings.pretty.output_format_pretty_display_footer_column_names_min_rows) && format_settings.pretty.output_format_pretty_display_footer_column_names) + writeCString("\n", out); + /// Write left blank + if ((num_rows >= format_settings.pretty.output_format_pretty_display_footer_column_names_min_rows) && format_settings.pretty.output_format_pretty_row_numbers && format_settings.pretty.output_format_pretty_display_footer_column_names) + writeString(String(row_number_width, ' '), out); + /// Write footer + if ((num_rows >= format_settings.pretty.output_format_pretty_display_footer_column_names_min_rows) && format_settings.pretty.output_format_pretty_display_footer_column_names) + write_names(true); total_rows += num_rows; } diff --git a/src/Server/HTTPHandler.cpp b/src/Server/HTTPHandler.cpp index 02d0959ff50..f6ca69813ae 100644 --- a/src/Server/HTTPHandler.cpp +++ b/src/Server/HTTPHandler.cpp @@ -30,7 +30,6 @@ #include #include #include -#include #include #include #include @@ -44,6 +43,7 @@ #include #include #include +#include #include #include #include @@ -53,7 +53,10 @@ #include #include #include +#include #include +#include +#include #if USE_SSL #include @@ -338,11 +341,11 @@ void HTTPHandler::pushDelayedResults(Output & used_output) } -HTTPHandler::HTTPHandler(IServer & server_, const std::string & name, const std::optional & content_type_override_) +HTTPHandler::HTTPHandler(IServer & server_, const std::string & name, const HTTPResponseHeaderSetup & http_response_headers_override_) : server(server_) , log(getLogger(name)) , default_settings(server.context()->getSettingsRef()) - , content_type_override(content_type_override_) + , http_response_headers_override(http_response_headers_override_) { server_display_name = server.config().getString("display_name", getFQDNOrHostName()); } @@ -670,8 +673,7 @@ void HTTPHandler::processQuery( { auto tmp_data = std::make_shared(server.context()->getTempDataOnDisk()); - auto create_tmp_disk_buffer = [tmp_data] (const WriteBufferPtr &) -> WriteBufferPtr - { + auto create_tmp_disk_buffer = [tmp_data] (const WriteBufferPtr &) -> WriteBufferPtr { return 
@@ -1185,8 +1188,9 @@ void HTTPHandler::handleRequest(HTTPServerRequest & request, HTTPServerResponse
         used_output.finalize();
 }

-DynamicQueryHandler::DynamicQueryHandler(IServer & server_, const std::string & param_name_, const std::optional<String>& content_type_override_)
-    : HTTPHandler(server_, "DynamicQueryHandler", content_type_override_), param_name(param_name_)
+DynamicQueryHandler::DynamicQueryHandler(
+    IServer & server_, const std::string & param_name_, const HTTPResponseHeaderSetup & http_response_headers_override_)
+    : HTTPHandler(server_, "DynamicQueryHandler", http_response_headers_override_), param_name(param_name_)
 {
 }

@@ -1247,8 +1251,8 @@ PredefinedQueryHandler::PredefinedQueryHandler(
     const std::string & predefined_query_,
     const CompiledRegexPtr & url_regex_,
     const std::unordered_map<String, String> & header_name_with_regex_,
-    const std::optional<String> & content_type_override_)
-    : HTTPHandler(server_, "PredefinedQueryHandler", content_type_override_)
+    const HTTPResponseHeaderSetup & http_response_headers_override_)
+    : HTTPHandler(server_, "PredefinedQueryHandler", http_response_headers_override_)
     , receive_params(receive_params_)
     , predefined_query(predefined_query_)
     , url_regex(url_regex_)
@@ -1340,14 +1344,10 @@ HTTPRequestHandlerFactoryPtr createDynamicHandlerFactory(IServer & server,
 {
     auto query_param_name = config.getString(config_prefix + ".handler.query_param_name", "query");

-    std::optional<String> content_type_override;
-    if (config.has(config_prefix + ".handler.content_type"))
-        content_type_override = config.getString(config_prefix + ".handler.content_type");
+    HTTPResponseHeaderSetup http_response_headers_override = parseHTTPResponseHeaders(config, config_prefix);

-    auto creator = [&server, query_param_name, content_type_override] () -> std::unique_ptr<DynamicQueryHandler>
-    {
-        return std::make_unique<DynamicQueryHandler>(server, query_param_name, content_type_override);
-    };
+    auto creator = [&server, query_param_name, http_response_headers_override]() -> std::unique_ptr<DynamicQueryHandler>
+    { return std::make_unique<DynamicQueryHandler>(server, query_param_name, http_response_headers_override); };

     auto factory = std::make_shared<HandlingRuleHTTPHandlerFactory<DynamicQueryHandler>>(std::move(creator));
     factory->addFiltersFromConfig(config, config_prefix);
@@ -1402,9 +1402,7 @@ HTTPRequestHandlerFactoryPtr createPredefinedHandlerFactory(IServer & server,
         headers_name_with_regex.emplace(std::make_pair(header_name, regex));
     }

-    std::optional<String> content_type_override;
-    if (config.has(config_prefix + ".handler.content_type"))
-        content_type_override = config.getString(config_prefix + ".handler.content_type");
+    HTTPResponseHeaderSetup http_response_headers_override = parseHTTPResponseHeaders(config, config_prefix);

     std::shared_ptr<HandlingRuleHTTPHandlerFactory<PredefinedQueryHandler>> factory;

@@ -1424,12 +1422,12 @@ HTTPRequestHandlerFactoryPtr createPredefinedHandlerFactory(IServer & server,
             predefined_query,
             regex,
             headers_name_with_regex,
-            content_type_override]
+            http_response_headers_override]
             -> std::unique_ptr<PredefinedQueryHandler>
         {
             return std::make_unique<PredefinedQueryHandler>(
                 server, analyze_receive_params, predefined_query, regex,
-                headers_name_with_regex, content_type_override);
+                headers_name_with_regex, http_response_headers_override);
         };
         factory = std::make_shared<HandlingRuleHTTPHandlerFactory<PredefinedQueryHandler>>(std::move(creator));
         factory->addFiltersFromConfig(config, config_prefix);
@@ -1442,12 +1440,12 @@ HTTPRequestHandlerFactoryPtr createPredefinedHandlerFactory(IServer & server,
             analyze_receive_params,
             predefined_query,
             headers_name_with_regex,
-            content_type_override]
+            http_response_headers_override]
             -> std::unique_ptr<PredefinedQueryHandler>
         {
             return std::make_unique<PredefinedQueryHandler>(
                 server, analyze_receive_params, predefined_query, CompiledRegexPtr{},
-                headers_name_with_regex, content_type_override);
+                headers_name_with_regex, http_response_headers_override);
         };

         factory = std::make_shared<HandlingRuleHTTPHandlerFactory<PredefinedQueryHandler>>(std::move(creator));
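// Both factory functions above parse the override map once at configuration time
// and capture it by value in the creator lambda, so every per-request handler is
// constructed with the same immutable headers. The pattern boiled down to its
// essentials; Handler and makeCreator are hypothetical names, not ClickHouse API:

#include <functional>
#include <memory>
#include <string>
#include <unordered_map>

struct Handler
{
    explicit Handler(std::unordered_map<std::string, std::string> headers_)
        : headers(std::move(headers_)) {}
    std::unordered_map<std::string, std::string> headers;
};

std::function<std::unique_ptr<Handler>()> makeCreator(
    std::unordered_map<std::string, std::string> headers_from_config)
{
    // Captured by value: config is read once, handlers are created per request.
    return [headers_from_config] { return std::make_unique<Handler>(headers_from_config); };
}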
diff --git a/src/Server/HTTPHandler.h b/src/Server/HTTPHandler.h
index a96402247a2..c5551102f7a 100644
--- a/src/Server/HTTPHandler.h
+++ b/src/Server/HTTPHandler.h
@@ -1,5 +1,8 @@
 #pragma once

+#include
+#include
+#include
 #include
 #include
 #include
@@ -10,6 +13,8 @@
 #include
 #include

+#include "HTTPResponseHeaderWriter.h"
+
 namespace CurrentMetrics
 {
     extern const Metric HTTPConnection;
@@ -31,7 +36,7 @@ using CompiledRegexPtr = std::shared_ptr<const re2::RE2>;
 class HTTPHandler : public HTTPRequestHandler
 {
 public:
-    HTTPHandler(IServer & server_, const std::string & name, const std::optional<String> & content_type_override_);
+    HTTPHandler(IServer & server_, const std::string & name, const HTTPResponseHeaderSetup & http_response_headers_override_);
     ~HTTPHandler() override;

     void handleRequest(HTTPServerRequest & request, HTTPServerResponse & response, const ProfileEvents::Event & write_event) override;
@@ -113,8 +118,8 @@ private:
     /// See settings http_max_fields, http_max_field_name_size, http_max_field_value_size in HTMLForm.
     const Settings & default_settings;

-    /// Overrides Content-Type provided by the format of the response.
-    std::optional<String> content_type_override;
+    /// Overrides for response headers.
+    HTTPResponseHeaderSetup http_response_headers_override;

     // session is reset at the end of each request/response.
     std::unique_ptr<Session> session;
@@ -162,8 +167,12 @@ class DynamicQueryHandler : public HTTPHandler
 {
 private:
     std::string param_name;
+
 public:
-    explicit DynamicQueryHandler(IServer & server_, const std::string & param_name_ = "query", const std::optional<String>& content_type_override_ = std::nullopt);
+    explicit DynamicQueryHandler(
+        IServer & server_,
+        const std::string & param_name_ = "query",
+        const HTTPResponseHeaderSetup & http_response_headers_override_ = std::nullopt);

     std::string getQuery(HTTPServerRequest & request, HTMLForm & params, ContextMutablePtr context) override;

@@ -177,11 +186,15 @@ private:
     std::string predefined_query;
     CompiledRegexPtr url_regex;
     std::unordered_map<String, String> header_name_with_capture_regex;
+
 public:
     PredefinedQueryHandler(
-        IServer & server_, const NameSet & receive_params_, const std::string & predefined_query_
-        , const CompiledRegexPtr & url_regex_, const std::unordered_map<String, String> & header_name_with_regex_
-        , const std::optional<String> & content_type_override_);
+        IServer & server_,
+        const NameSet & receive_params_,
+        const std::string & predefined_query_,
+        const CompiledRegexPtr & url_regex_,
+        const std::unordered_map<String, String> & header_name_with_regex_,
+        const HTTPResponseHeaderSetup & http_response_headers_override_ = std::nullopt);

     void customizeContext(HTTPServerRequest & request, ContextMutablePtr context, ReadBuffer & body) override;
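// HTTPResponseHeaderSetup (declared in HTTPResponseHeaderWriter.h below) wraps the
// header map in std::optional, so "no override configured" (nullopt) stays
// distinguishable from "section present but Content-Type not forced". A small
// illustration of the distinction; the alias here is local to the sketch:

#include <optional>
#include <string>
#include <unordered_map>

using HeaderSetup = std::optional<std::unordered_map<std::string, std::string>>;

bool pinsContentType(const HeaderSetup & setup)
{
    // nullopt: the handler config set no response headers at all, so the output
    // format remains free to pick the Content-Type.
    return setup && setup->contains("Content-Type");
}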
diff --git a/src/Server/HTTPHandlerFactory.cpp b/src/Server/HTTPHandlerFactory.cpp
index 9a67e576345..5344b2d024b 100644
--- a/src/Server/HTTPHandlerFactory.cpp
+++ b/src/Server/HTTPHandlerFactory.cpp
@@ -74,7 +74,8 @@ static auto createPingHandlerFactory(IServer & server)
     auto creator = [&server]() -> std::unique_ptr<StaticRequestHandler>
     {
         constexpr auto ping_response_expression = "Ok.\n";
-        return std::make_unique<StaticRequestHandler>(server, ping_response_expression);
+        return std::make_unique<StaticRequestHandler>(
+            server, ping_response_expression, parseHTTPResponseHeaders("text/html; charset=UTF-8"));
     };
     return std::make_shared<HandlingRuleHTTPHandlerFactory<StaticRequestHandler>>(std::move(creator));
 }
@@ -214,7 +215,8 @@ void addCommonDefaultHandlersFactory(HTTPRequestHandlerFactoryMain & factory, IS
     auto root_creator = [&server]() -> std::unique_ptr<StaticRequestHandler>
     {
         constexpr auto root_response_expression = "config://http_server_default_response";
-        return std::make_unique<StaticRequestHandler>(server, root_response_expression);
+        return std::make_unique<StaticRequestHandler>(
+            server, root_response_expression, parseHTTPResponseHeaders("text/html; charset=UTF-8"));
     };
     auto root_handler = std::make_shared<HandlingRuleHTTPHandlerFactory<StaticRequestHandler>>(std::move(root_creator));
     root_handler->attachStrictPath("/");
diff --git a/src/Server/HTTPResponseHeaderWriter.cpp b/src/Server/HTTPResponseHeaderWriter.cpp
new file mode 100644
index 00000000000..fd29af5bdc7
--- /dev/null
+++ b/src/Server/HTTPResponseHeaderWriter.cpp
@@ -0,0 +1,69 @@
+#include "HTTPResponseHeaderWriter.h"
+#include
+#include
+#include
+
+namespace DB
+{
+
+std::unordered_map<String, String>
+baseParseHTTPResponseHeaders(const Poco::Util::AbstractConfiguration & config, const std::string & config_prefix)
+{
+    std::unordered_map<String, String> http_response_headers_override;
+    String http_response_headers_key = config_prefix + ".handler.http_response_headers";
+    String http_response_headers_key_prefix = http_response_headers_key + ".";
+    if (config.has(http_response_headers_key))
+    {
+        Poco::Util::AbstractConfiguration::Keys keys;
+        config.keys(http_response_headers_key, keys);
+        for (const auto & key : keys)
+        {
+            http_response_headers_override[key] = config.getString(http_response_headers_key_prefix + key);
+        }
+    }
+    if (config.has(config_prefix + ".handler.content_type"))
+        http_response_headers_override[Poco::Net::HTTPMessage::CONTENT_TYPE] = config.getString(config_prefix + ".handler.content_type");
+
+    return http_response_headers_override;
+}
+
+HTTPResponseHeaderSetup parseHTTPResponseHeaders(const Poco::Util::AbstractConfiguration & config, const std::string & config_prefix)
+{
+    std::unordered_map<String, String> http_response_headers_override = baseParseHTTPResponseHeaders(config, config_prefix);
+
+    if (http_response_headers_override.empty())
+        return {};
+
+    return std::move(http_response_headers_override);
+}
+
+std::unordered_map<String, String> parseHTTPResponseHeaders(
+    const Poco::Util::AbstractConfiguration & config, const std::string & config_prefix, const std::string & default_content_type)
+{
+    std::unordered_map<String, String> http_response_headers_override = baseParseHTTPResponseHeaders(config, config_prefix);
+
+    if (!http_response_headers_override.contains(Poco::Net::HTTPMessage::CONTENT_TYPE))
+        http_response_headers_override[Poco::Net::HTTPMessage::CONTENT_TYPE] = default_content_type;
+
+    return http_response_headers_override;
+}
+
+std::unordered_map<String, String> parseHTTPResponseHeaders(const std::string & default_content_type)
+{
+    return {{{Poco::Net::HTTPMessage::CONTENT_TYPE, default_content_type}}};
+}
+
+void applyHTTPResponseHeaders(Poco::Net::HTTPResponse & response, const HTTPResponseHeaderSetup & setup)
+{
+    if (setup)
+        for (const auto & [header_name, header_value] : *setup)
+            response.set(header_name, header_value);
+}
+
+void applyHTTPResponseHeaders(Poco::Net::HTTPResponse & response, const std::unordered_map<String, String> & setup)
+{
+    for (const auto & [header_name, header_value] : setup)
+        response.set(header_name, header_value);
+}
+
+}
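// Judging only by the keys read in baseParseHTTPResponseHeaders above
// (".handler.http_response_headers" plus the legacy ".handler.content_type"),
// a handler rule in config.xml could plausibly carry overrides like this; the
// URL, query, and header values are invented for illustration:
//
//     <http_handlers>
//         <rule>
//             <url>/my_endpoint</url>
//             <handler>
//                 <type>predefined_query_handler</type>
//                 <query>SELECT 1</query>
//                 <http_response_headers>
//                     <Content-Type>application/json</Content-Type>
//                     <X-My-Custom-Header>some-value</X-My-Custom-Header>
//                 </http_response_headers>
//             </handler>
//         </rule>
//     </http_handlers>
//
// An explicit <content_type> would land in the same map under the Content-Type
// key, and thus win over whatever the output format would have chosen.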
diff --git a/src/Server/HTTPResponseHeaderWriter.h b/src/Server/HTTPResponseHeaderWriter.h
new file mode 100644
index 00000000000..06281abb42d
--- /dev/null
+++ b/src/Server/HTTPResponseHeaderWriter.h
@@ -0,0 +1,25 @@
+#pragma once
+
+#include
+#include
+#include
+#include
+#include
+#include
+
+namespace DB
+{
+
+using HTTPResponseHeaderSetup = std::optional<std::unordered_map<String, String>>;
+
+HTTPResponseHeaderSetup parseHTTPResponseHeaders(const Poco::Util::AbstractConfiguration & config, const std::string & config_prefix);
+
+std::unordered_map<String, String> parseHTTPResponseHeaders(
+    const Poco::Util::AbstractConfiguration & config, const std::string & config_prefix, const std::string & default_content_type);
+
+std::unordered_map<String, String> parseHTTPResponseHeaders(const std::string & default_content_type);
+
+void applyHTTPResponseHeaders(Poco::Net::HTTPResponse & response, const HTTPResponseHeaderSetup & setup);
+
+void applyHTTPResponseHeaders(Poco::Net::HTTPResponse & response, const std::unordered_map<String, String> & setup);
+}
diff --git a/src/Server/StaticRequestHandler.cpp b/src/Server/StaticRequestHandler.cpp
index 67bf3875de4..331b7a84857 100644
--- a/src/Server/StaticRequestHandler.cpp
+++ b/src/Server/StaticRequestHandler.cpp
@@ -2,7 +2,7 @@

 #include "IServer.h"
 #include "HTTPHandlerFactory.h"
-#include "HTTPHandlerRequestFilter.h"
+#include "HTTPResponseHeaderWriter.h"

 #include
 #include
@@ -14,6 +14,7 @@

 #include

+#include
 #include
 #include
 #include
@@ -94,7 +95,7 @@ void StaticRequestHandler::handleRequest(HTTPServerRequest & request, HTTPServer
     try
     {
-        response.setContentType(content_type);
+        applyHTTPResponseHeaders(response, http_response_headers_override);

         if (request.getVersion() == Poco::Net::HTTPServerRequest::HTTP_1_1)
             response.setChunkedTransferEncoding(true);
@@ -155,8 +156,9 @@ void StaticRequestHandler::writeResponse(WriteBuffer & out)
         writeString(response_expression, out);
 }

-StaticRequestHandler::StaticRequestHandler(IServer & server_, const String & expression, int status_, const String & content_type_)
-    : server(server_), status(status_), content_type(content_type_), response_expression(expression)
+StaticRequestHandler::StaticRequestHandler(
+    IServer & server_, const String & expression, const std::unordered_map<String, String> & http_response_headers_override_, int status_)
+    : server(server_), status(status_), http_response_headers_override(http_response_headers_override_), response_expression(expression)
 {
 }

@@ -166,12 +168,12 @@ HTTPRequestHandlerFactoryPtr createStaticHandlerFactory(IServer & server,
 {
     int status = config.getInt(config_prefix + ".handler.status", 200);
     std::string response_content = config.getRawString(config_prefix + ".handler.response_content", "Ok.\n");
-    std::string response_content_type = config.getString(config_prefix + ".handler.content_type", "text/plain; charset=UTF-8");
-    auto creator = [&server, response_content, status, response_content_type]() -> std::unique_ptr<StaticRequestHandler>
-    {
-        return std::make_unique<StaticRequestHandler>(server, response_content, status, response_content_type);
-    };
+    std::unordered_map<String, String> http_response_headers_override
+        = parseHTTPResponseHeaders(config, config_prefix, "text/plain; charset=UTF-8");
+
+    auto creator = [&server, http_response_headers_override, response_content, status]() -> std::unique_ptr<StaticRequestHandler>
+    { return std::make_unique<StaticRequestHandler>(server, response_content, http_response_headers_override, status); };

     auto factory = std::make_shared<HandlingRuleHTTPHandlerFactory<StaticRequestHandler>>(std::move(creator));
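// createStaticHandlerFactory above uses the defaulting overload, so the static
// handler's map always ends up with a Content-Type entry (hence the plain,
// non-optional member in StaticRequestHandler.h below). That overload's
// behaviour restated as a self-contained sketch, with a pre-parsed map standing
// in for the config lookup:

#include <string>
#include <unordered_map>

using HeaderMap = std::unordered_map<std::string, std::string>;

HeaderMap withDefaultContentType(HeaderMap parsed, const std::string & default_content_type)
{
    // Keep whatever the config specified; only fill the gap.
    if (!parsed.contains("Content-Type"))
        parsed["Content-Type"] = default_content_type;
    return parsed;
}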
diff --git a/src/Server/StaticRequestHandler.h b/src/Server/StaticRequestHandler.h
index 38d774bb0aa..41fb395d969 100644
--- a/src/Server/StaticRequestHandler.h
+++ b/src/Server/StaticRequestHandler.h
@@ -1,9 +1,9 @@
 #pragma once

+#include
 #include
 #include

-
 namespace DB
 {

@@ -17,15 +17,16 @@ private:
     IServer & server;

     int status;
-    String content_type;
+    /// Overrides for response headers.
+    std::unordered_map<String, String> http_response_headers_override;
     String response_expression;

 public:
     StaticRequestHandler(
         IServer & server,
         const String & expression,
-        int status_ = 200,
-        const String & content_type_ = "text/html; charset=UTF-8");
+        const std::unordered_map<String, String> & http_response_headers_override_,
+        int status_ = 200);

     void writeResponse(WriteBuffer & out);
diff --git a/src/Storages/ObjectStorage/HDFS/WriteBufferFromHDFS.cpp b/src/Storages/ObjectStorage/HDFS/WriteBufferFromHDFS.cpp
index 2c14b38ce01..8277a769a11 100644
--- a/src/Storages/ObjectStorage/HDFS/WriteBufferFromHDFS.cpp
+++ b/src/Storages/ObjectStorage/HDFS/WriteBufferFromHDFS.cpp
@@ -132,11 +132,11 @@ void WriteBufferFromHDFS::sync()
 }


-void WriteBufferFromHDFS::finalizeImpl()
+WriteBufferFromHDFS::~WriteBufferFromHDFS()
 {
     try
     {
-        next();
+        finalize();
     }
     catch (...)
     {
@@ -144,11 +144,5 @@ void WriteBufferFromHDFS::finalizeImpl()
     }
 }

-
-WriteBufferFromHDFS::~WriteBufferFromHDFS()
-{
-    finalize();
-}
-
 }
 #endif
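// The HDFS buffer now finalizes from its destructor rather than overriding
// finalizeImpl(), and the destructor swallows exceptions because throwing during
// stack unwinding would terminate the process. The shape of that pattern, reduced
// to a skeleton (Buffer and the logging are placeholders, not ClickHouse API):

#include <iostream>

struct Buffer
{
    bool finalized = false;

    void finalize()
    {
        if (finalized)
            return;          // idempotent, so calling again from the dtor is safe
        // ... flush buffered data to the remote file system ...
        finalized = true;
    }

    ~Buffer()
    {
        try
        {
            finalize();      // last-chance flush for callers that forgot to finalize
        }
        catch (...)          // never let an exception escape a destructor
        {
            std::cerr << "finalize failed during destruction\n";
        }
    }
};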
diff --git a/src/Storages/ObjectStorage/HDFS/WriteBufferFromHDFS.h b/src/Storages/ObjectStorage/HDFS/WriteBufferFromHDFS.h
index 71e6e55addc..e3f0ae96a8f 100644
--- a/src/Storages/ObjectStorage/HDFS/WriteBufferFromHDFS.h
+++ b/src/Storages/ObjectStorage/HDFS/WriteBufferFromHDFS.h
@@ -38,8 +38,6 @@ public:
     std::string getFileName() const override { return filename; }

 private:
-    void finalizeImpl() override;
-
     struct WriteBufferFromHDFSImpl;
     std::unique_ptr<WriteBufferFromHDFSImpl> impl;
     const std::string filename;
diff --git a/src/Storages/ObjectStorage/StorageObjectStorageSink.cpp b/src/Storages/ObjectStorage/StorageObjectStorageSink.cpp
index 0a3cf19a590..d13aec4a4f6 100644
--- a/src/Storages/ObjectStorage/StorageObjectStorageSink.cpp
+++ b/src/Storages/ObjectStorage/StorageObjectStorageSink.cpp
@@ -83,7 +83,6 @@ void StorageObjectStorageSink::finalize()
     {
         writer->finalize();
         writer->flush();
-        write_buf->finalize();
     }
     catch (...)
     {
@@ -91,6 +90,8 @@ void StorageObjectStorageSink::finalize()
         release();
         throw;
     }
+
+    write_buf->finalize();
 }

 void StorageObjectStorageSink::release()
diff --git a/src/Storages/StorageFile.cpp b/src/Storages/StorageFile.cpp
index aaf84f6f82c..16c248f1b7b 100644
--- a/src/Storages/StorageFile.cpp
+++ b/src/Storages/StorageFile.cpp
@@ -1823,7 +1823,6 @@ private:
     {
         writer->finalize();
         writer->flush();
-        write_buf->finalize();
     }
     catch (...)
     {
@@ -1831,12 +1830,14 @@ private:
         release();
         throw;
     }
+
+    write_buf->finalize();
 }

 void release()
 {
     writer.reset();
-    write_buf->finalize();
+    write_buf.reset();
 }

 StorageMetadataPtr metadata_snapshot;
diff --git a/src/Storages/StorageURL.cpp b/src/Storages/StorageURL.cpp
index 9302e7ef3e5..f8424bc3d1b 100644
--- a/src/Storages/StorageURL.cpp
+++ b/src/Storages/StorageURL.cpp
@@ -609,7 +609,6 @@ void StorageURLSink::finalize()
     {
         writer->finalize();
         writer->flush();
-        write_buf->finalize();
     }
     catch (...)
{ @@ -617,12 +616,14 @@ void StorageURLSink::finalize() release(); throw; } + + write_buf->finalize(); } void StorageURLSink::release() { writer.reset(); - write_buf->finalize(); + write_buf.reset(); } class PartitionedStorageURLSink : public PartitionedSink diff --git a/tests/analyzer_tech_debt.txt b/tests/analyzer_tech_debt.txt index 5f798158a41..1f7357b6494 100644 --- a/tests/analyzer_tech_debt.txt +++ b/tests/analyzer_tech_debt.txt @@ -9,3 +9,5 @@ 01287_max_execution_speed # Check after ConstantNode refactoring 02154_parser_backtracking +02944_variant_as_common_type +02942_variant_cast diff --git a/tests/ci/artifacts_helper.py b/tests/ci/artifacts_helper.py index 37abf0bdefb..503ba2e1ec4 100644 --- a/tests/ci/artifacts_helper.py +++ b/tests/ci/artifacts_helper.py @@ -15,7 +15,7 @@ from github.Commit import Commit from build_download_helper import download_build_with_progress from commit_status_helper import post_commit_status from compress_files import SUFFIX, compress_fast, decompress_fast -from env_helper import CI, RUNNER_TEMP, S3_BUILDS_BUCKET +from env_helper import IS_CI, RUNNER_TEMP, S3_BUILDS_BUCKET from git_helper import SHA_REGEXP from report import FOOTER_HTML_TEMPLATE, HEAD_HTML_TEMPLATE, SUCCESS from s3_helper import S3Helper @@ -131,7 +131,7 @@ class ArtifactsHelper: post_commit_status(commit, SUCCESS, url, "Artifacts for workflow", "Artifacts") def _regenerate_index(self) -> None: - if CI: + if IS_CI: files = self._get_s3_objects() else: files = self._get_local_s3_objects() diff --git a/tests/ci/ast_fuzzer_check.py b/tests/ci/ast_fuzzer_check.py index b88a9476a6d..8bc0f51dfc7 100644 --- a/tests/ci/ast_fuzzer_check.py +++ b/tests/ci/ast_fuzzer_check.py @@ -6,7 +6,7 @@ import subprocess import sys from pathlib import Path -from build_download_helper import get_build_name_for_check, read_build_urls +from build_download_helper import read_build_urls from clickhouse_helper import CiLogsCredentials from docker_images_helper import DockerImage, get_docker_image, pull_image from env_helper import REPORT_PATH, TEMP_PATH @@ -14,6 +14,7 @@ from pr_info import PRInfo from report import FAIL, FAILURE, OK, SUCCESS, JobReport, TestResult from stopwatch import Stopwatch from tee_popen import TeePopen +from ci_config import CI IMAGE_NAME = "clickhouse/fuzzer" @@ -64,7 +65,7 @@ def main(): docker_image = pull_image(get_docker_image(IMAGE_NAME)) - build_name = get_build_name_for_check(check_name) + build_name = CI.get_required_build_name(check_name) urls = read_build_urls(build_name, reports_path) if not urls: raise ValueError("No build URLs found") diff --git a/tests/ci/bugfix_validate_check.py b/tests/ci/bugfix_validate_check.py index d41fdaf05ff..71b18572938 100644 --- a/tests/ci/bugfix_validate_check.py +++ b/tests/ci/bugfix_validate_check.py @@ -7,7 +7,7 @@ import sys from pathlib import Path from typing import List, Sequence, Tuple -from ci_config import JobNames +from ci_config import CI from ci_utils import normalize_string from env_helper import TEMP_PATH from functional_test_check import NO_CHANGES_MSG @@ -92,16 +92,19 @@ def main(): logging.basicConfig(level=logging.INFO) # args = parse_args() stopwatch = Stopwatch() - jobs_to_validate = [JobNames.STATELESS_TEST_RELEASE, JobNames.INTEGRATION_TEST] + jobs_to_validate = [ + CI.JobNames.STATELESS_TEST_RELEASE, + CI.JobNames.INTEGRATION_TEST, + ] functional_job_report_file = Path(TEMP_PATH) / "functional_test_job_report.json" integration_job_report_file = Path(TEMP_PATH) / "integration_test_job_report.json" jobs_report_files = { - 
JobNames.STATELESS_TEST_RELEASE: functional_job_report_file, - JobNames.INTEGRATION_TEST: integration_job_report_file, + CI.JobNames.STATELESS_TEST_RELEASE: functional_job_report_file, + CI.JobNames.INTEGRATION_TEST: integration_job_report_file, } jobs_scripts = { - JobNames.STATELESS_TEST_RELEASE: "functional_test_check.py", - JobNames.INTEGRATION_TEST: "integration_test_check.py", + CI.JobNames.STATELESS_TEST_RELEASE: "functional_test_check.py", + CI.JobNames.INTEGRATION_TEST: "integration_test_check.py", } for test_job in jobs_to_validate: diff --git a/tests/ci/build_check.py b/tests/ci/build_check.py index 260b77b0ee5..39f34ed9ccf 100644 --- a/tests/ci/build_check.py +++ b/tests/ci/build_check.py @@ -9,7 +9,7 @@ from pathlib import Path from typing import Tuple import docker_images_helper -from ci_config import CI_CONFIG, BuildConfig +from ci_config import CI from env_helper import REPO_COPY, S3_BUILDS_BUCKET, TEMP_PATH from git_helper import Git from lambda_shared_package.lambda_shared.pr import Labels @@ -27,7 +27,7 @@ IMAGE_NAME = "clickhouse/binary-builder" BUILD_LOG_NAME = "build_log.log" -def _can_export_binaries(build_config: BuildConfig) -> bool: +def _can_export_binaries(build_config: CI.BuildConfig) -> bool: if build_config.package_type != "deb": return False if build_config.sanitizer != "": @@ -38,7 +38,7 @@ def _can_export_binaries(build_config: BuildConfig) -> bool: def get_packager_cmd( - build_config: BuildConfig, + build_config: CI.BuildConfig, packager_path: Path, output_path: Path, build_version: str, @@ -147,7 +147,8 @@ def main(): stopwatch = Stopwatch() build_name = args.build_name - build_config = CI_CONFIG.build_config[build_name] + build_config = CI.JOB_CONFIGS[build_name].build_config + assert build_config temp_path = Path(TEMP_PATH) temp_path.mkdir(parents=True, exist_ok=True) diff --git a/tests/ci/build_download_helper.py b/tests/ci/build_download_helper.py index a641f9f4544..8482abb26e0 100644 --- a/tests/ci/build_download_helper.py +++ b/tests/ci/build_download_helper.py @@ -10,7 +10,7 @@ from typing import Any, Callable, List, Optional, Union import requests -from ci_config import CI_CONFIG +from ci_config import CI try: # A work around for scripts using this downloading module without required deps @@ -122,10 +122,6 @@ def get_gh_api( raise APIException(f"Unable to request data from GH API: {url}") from exc -def get_build_name_for_check(check_name: str) -> str: - return CI_CONFIG.test_configs[check_name].required_build - - def read_build_urls(build_name: str, reports_path: Union[Path, str]) -> List[str]: for root, _, files in os.walk(reports_path): for file in files: @@ -210,7 +206,7 @@ def download_builds_filter( result_path: Path, filter_fn: Callable[[str], bool] = lambda _: True, ) -> None: - build_name = get_build_name_for_check(check_name) + build_name = CI.get_required_build_name(check_name) urls = read_build_urls(build_name, reports_path) logger.info("The build report for %s contains the next URLs: %s", build_name, urls) @@ -247,7 +243,7 @@ def download_clickhouse_binary( def get_clickhouse_binary_url( check_name: str, reports_path: Union[Path, str] ) -> Optional[str]: - build_name = get_build_name_for_check(check_name) + build_name = CI.get_required_build_name(check_name) urls = read_build_urls(build_name, reports_path) logger.info("The build report for %s contains the next URLs: %s", build_name, urls) for url in urls: diff --git a/tests/ci/build_report_check.py b/tests/ci/build_report_check.py index 1d734fbb3f8..664f6a7cbb9 100644 --- 
a/tests/ci/build_report_check.py +++ b/tests/ci/build_report_check.py @@ -1,4 +1,5 @@ #!/usr/bin/env python3 +import argparse import json import logging import os @@ -6,7 +7,6 @@ import sys from pathlib import Path from typing import List -from ci_config import CI_CONFIG, Build from env_helper import ( GITHUB_JOB_URL, GITHUB_REPOSITORY, @@ -14,7 +14,7 @@ from env_helper import ( REPORT_PATH, TEMP_PATH, CI_CONFIG_PATH, - CI, + IS_CI, ) from pr_info import PRInfo from report import ( @@ -25,8 +25,10 @@ from report import ( JobReport, create_build_html_report, get_worst_status, + FAILURE, ) from stopwatch import Stopwatch +from ci_config import CI # Old way to read the neads_data NEEDS_DATA_PATH = os.getenv("NEEDS_DATA_PATH", "") @@ -46,16 +48,13 @@ def main(): "\n ".join(p.as_posix() for p in reports_path.rglob("*.json")), ) - build_check_name = sys.argv[1] + build_check_name = CI.JobNames.BUILD_CHECK pr_info = PRInfo() - builds_for_check = CI_CONFIG.get_builds_for_report( - build_check_name, - release=pr_info.is_release, - backport=pr_info.head_ref.startswith("backport/"), - ) - if CI: + args = parse_args() + + if (CI_CONFIG_PATH or IS_CI) and not args.reports: # In CI only specific builds might be manually selected, or some wf does not build all builds. # Filtering @builds_for_check to verify only builds that are present in the current CI workflow with open(CI_CONFIG_PATH, encoding="utf-8") as jfd: @@ -64,8 +63,12 @@ def main(): ci_config["jobs_data"]["jobs_to_skip"] + ci_config["jobs_data"]["jobs_to_do"] ) - builds_for_check = [job for job in builds_for_check if job in all_ci_jobs] - print(f"NOTE: following build reports will be accounted: [{builds_for_check}]") + builds_for_check = [job for job in CI.BuildNames if job in all_ci_jobs] + print(f"NOTE: following build reports will be checked: [{builds_for_check}]") + else: + builds_for_check = parse_args().reports + for job in builds_for_check: + assert job in CI.BuildNames, "Builds must be known build job names" required_builds = len(builds_for_check) missing_builds = 0 @@ -77,8 +80,8 @@ def main(): build_name, pr_info.number, pr_info.head_ref ) if not build_result: - if build_name == Build.FUZZERS: - logging.info("Build [%s] is missing - skip", Build.FUZZERS) + if build_name == CI.BuildNames.FUZZERS: + logging.info("Build [%s] is missing - skip", CI.BuildNames.FUZZERS) continue logging.warning("Build results for %s is missing", build_name) build_result = BuildResult.missing_result("missing") @@ -132,17 +135,16 @@ def main(): # Check if there are no builds at all, do not override bad status if summary_status == SUCCESS: if missing_builds: - summary_status = PENDING + summary_status = FAILURE elif ok_groups == 0: summary_status = ERROR - addition = "" - if missing_builds: - addition = ( - f" ({required_builds - missing_builds} of {required_builds} builds are OK)" - ) + description = "" - description = f"{ok_groups}/{total_groups} artifact groups are OK{addition}" + if missing_builds: + description = f"{missing_builds} of {required_builds} builds are missing." 
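# The FAILURE/description logic above pairs with the new parse_args at the bottom
# of this file's diff: in CI the build list is derived from the workflow's CI
# config, while --reports allows an explicit list when running outside CI. A
# hypothetical invocation (build names assumed to be valid CI.BuildNames values):
#
#     python3 build_report_check.py --reports package_release package_aarch64
#
# With --reports omitted and CI_CONFIG_PATH set, the branch above instead filters
# CI.BuildNames down to the jobs the current workflow actually scheduled.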
+ + description += f" {ok_groups}/{total_groups} artifact groups are OK" JobReport( description=description, @@ -158,5 +160,16 @@ def main(): sys.exit(1) +def parse_args(): + parser = argparse.ArgumentParser("Generates overall build report") + + parser.add_argument( + "--reports", + nargs="+", + help="List of build reports to check", + ) + return parser.parse_args() + + if __name__ == "__main__": main() diff --git a/tests/ci/ci.py b/tests/ci/ci.py index bb23de142df..135a4c91c56 100644 --- a/tests/ci/ci.py +++ b/tests/ci/ci.py @@ -13,14 +13,7 @@ from typing import Any, Dict, List, Optional import docker_images_helper import upload_result_helper from build_check import get_release_or_pr -from ci_config import ( - CI_CONFIG, - Build, - CILabels, - CIStages, - JobNames, - StatusNames, -) +from ci_config import CI from ci_metadata import CiMetadata from ci_utils import GHActions, normalize_string from clickhouse_helper import ( @@ -38,10 +31,11 @@ from commit_status_helper import ( get_commit, post_commit_status, set_status_comment, + get_commit_filtered_statuses, ) from digest_helper import DockerDigester from env_helper import ( - CI, + IS_CI, GITHUB_JOB_API_URL, GITHUB_REPOSITORY, GITHUB_RUN_ID, @@ -295,7 +289,7 @@ def _mark_success_action( batch: int, ) -> None: ci_cache = CiCache(s3, indata["jobs_data"]["digests"]) - job_config = CI_CONFIG.get_job_config(job) + job_config = CI.get_job_config(job) num_batches = job_config.num_batches # if batch is not provided - set to 0 batch = 0 if batch == -1 else batch @@ -305,7 +299,7 @@ def _mark_success_action( # FIXME: find generic design for propagating and handling job status (e.g. stop using statuses in GH api) # now job ca be build job w/o status data, any other job that exit with 0 with or w/o status data - if CI_CONFIG.is_build_job(job): + if CI.is_build_job(job): # there is no CommitStatus for build jobs # create dummy status relying on JobReport # FIXME: consider creating commit status for build jobs too, to treat everything the same way @@ -425,6 +419,7 @@ def _configure_jobs( pr_info: PRInfo, ci_settings: CiSettings, skip_jobs: bool, + dry_run: bool = False, ) -> CiCache: """ returns CICache instance with configured job's data @@ -436,10 +431,11 @@ def _configure_jobs( # get all jobs if not skip_jobs: - job_configs = CI_CONFIG.get_workflow_jobs_with_configs( + job_configs = CI.get_workflow_jobs_with_configs( is_mq=pr_info.is_merge_queue, is_docs_only=pr_info.has_changes_in_documentation_only(), is_master=pr_info.is_master, + is_pr=pr_info.is_pr, ) else: job_configs = {} @@ -457,7 +453,8 @@ def _configure_jobs( ci_cache = CiCache.calc_digests_and_create( s3, job_configs, - cache_enabled=not ci_settings.no_ci_cache and not skip_jobs and CI, + cache_enabled=not ci_settings.no_ci_cache and not skip_jobs and IS_CI, + dry_run=dry_run, ) ci_cache.update() ci_cache.apply(job_configs, is_release=pr_info.is_release) @@ -475,14 +472,14 @@ def _generate_ci_stage_config(jobs_data: Dict[str, Any]) -> Dict[str, Dict[str, result = {} # type: Dict[str, Any] stages_to_do = [] for job in jobs_data: - stage_type = CI_CONFIG.get_job_ci_stage(job) - if stage_type == CIStages.NA: + stage_type = CI.get_job_ci_stage(job) + if stage_type == CI.WorkflowStages.NA: continue if stage_type not in result: result[stage_type] = [] stages_to_do.append(stage_type) result[stage_type].append( - {"job_name": job, "runner_type": CI_CONFIG.get_runner_type(job)} + {"job_name": job, "runner_type": CI.JOB_CONFIGS[job].runner_type} ) result["stages_to_do"] = stages_to_do return 
result @@ -529,10 +526,10 @@ def _update_gh_statuses_action(indata: Dict, s3: S3Helper) -> None: if job not in jobs_to_skip and job not in jobs_to_do: # no need to create status for job that are not supposed to be executed continue - if CI_CONFIG.is_build_job(job): + if CI.is_build_job(job): # no GH status for build jobs continue - job_config = CI_CONFIG.get_job_config(job) + job_config = CI.get_job_config(job) if not job_config: # there might be a new job that does not exist on this branch - skip it continue @@ -558,7 +555,7 @@ def _fetch_commit_tokens(message: str, pr_info: PRInfo) -> List[str]: res = [ match for match in matches - if match in CILabels or match.startswith("job_") or match.startswith("batch_") + if match in CI.Tags or match.startswith("job_") or match.startswith("batch_") ] print(f"CI modifiers from commit message: [{res}]") res_2 = [] @@ -567,7 +564,7 @@ def _fetch_commit_tokens(message: str, pr_info: PRInfo) -> List[str]: res_2 = [ match for match in matches - if match in CILabels + if match in CI.Tags or match.startswith("job_") or match.startswith("batch_") ] @@ -643,7 +640,7 @@ def _upload_build_artifacts( print(f"Report file has been uploaded to [{report_url}]") # Upload master head's binaries - static_bin_name = CI_CONFIG.build_config[build_name].static_binary_name + static_bin_name = CI.get_build_config(build_name).static_binary_name if pr_info.is_master and static_bin_name: # Full binary with debug info: s3_path_full = "/".join((pr_info.base_ref, static_bin_name, "clickhouse-full")) @@ -838,15 +835,15 @@ def _add_build_to_version_history( def _run_test(job_name: str, run_command: str) -> int: assert ( - run_command or CI_CONFIG.get_job_config(job_name).run_command + run_command or CI.get_job_config(job_name).run_command ), "Run command must be provided as input argument or be configured in job config" env = os.environ.copy() - timeout = CI_CONFIG.get_job_config(job_name).timeout or None + timeout = CI.get_job_config(job_name).timeout or None if not run_command: run_command = "/".join( - (os.path.dirname(__file__), CI_CONFIG.get_job_config(job_name).run_command) + (os.path.dirname(__file__), CI.get_job_config(job_name).run_command) ) if ".py" in run_command and not run_command.startswith("python"): run_command = "python3 " + run_command @@ -913,13 +910,23 @@ def _cancel_pr_wf(s3: S3Helper, pr_number: int, cancel_sync: bool = False) -> No def _set_pending_statuses(pr_info: PRInfo) -> None: commit = get_commit(GitHub(get_best_robot_token(), per_page=100), pr_info.sha) try: - print("Set SYNC status to pending") - commit.create_status( - state=PENDING, - target_url="", - description="", - context=StatusNames.SYNC, - ) + found = False + statuses = get_commit_filtered_statuses(commit) + for commit_status in statuses: + if commit_status.context == CI.StatusNames.SYNC: + print( + f"Sync status found [{commit_status.state}], [{commit_status.description}] - won't be overwritten" + ) + found = True + break + if not found: + print("Set Sync status to pending") + commit.create_status( + state=PENDING, + target_url="", + description=CI.SyncState.PENDING, + context=CI.StatusNames.SYNC, + ) except Exception as ex: print(f"ERROR: failed to set GH commit status, ex: {ex}") @@ -952,7 +959,7 @@ def main() -> int: ### CONFIGURE action: start if args.configure: - if CI and pr_info.is_pr: + if IS_CI and pr_info.is_pr: # store meta on s3 (now we need it only for PRs) meta = CiMetadata(s3, pr_info.number, pr_info.head_ref) meta.run_id = int(GITHUB_RUN_ID) @@ -962,7 +969,7 @@ def main() -> 
int: args.commit_message or None, update_from_api=True ) - if ci_settings.no_merge_commit and CI: + if ci_settings.no_merge_commit and IS_CI: git_runner.run(f"{GIT_PREFIX} checkout {pr_info.sha}") git_ref = git_runner.run(f"{GIT_PREFIX} rev-parse HEAD") @@ -985,18 +992,19 @@ def main() -> int: ) ci_cache.print_status() - if CI and not pr_info.is_merge_queue: + if IS_CI and not pr_info.is_merge_queue: # wait for pending jobs to be finished, await_jobs is a long blocking call ci_cache.await_pending_jobs(pr_info.is_release) if pr_info.is_release: + print("Release/master: CI Cache add pending records for all todo jobs") ci_cache.push_pending_all(pr_info.is_release) # conclude results result["git_ref"] = git_ref result["version"] = version - result["build"] = ci_cache.job_digests[Build.PACKAGE_RELEASE] - result["docs"] = ci_cache.job_digests[JobNames.DOCS_CHECK] + result["build"] = ci_cache.job_digests[CI.BuildNames.PACKAGE_RELEASE] + result["docs"] = ci_cache.job_digests[CI.JobNames.DOCS_CHECK] result["ci_settings"] = ci_settings.as_dict() if not args.skip_jobs: result["stages_data"] = _generate_ci_stage_config(ci_cache.jobs_to_do) @@ -1027,7 +1035,7 @@ def main() -> int: f"Check if rerun for name: [{check_name}], extended name [{check_name_with_group}]" ) previous_status = None - if CI_CONFIG.is_build_job(check_name): + if CI.is_build_job(check_name): # this is a build job - check if a build report is present build_result = ( BuildResult.load_any(check_name, pr_info.number, pr_info.head_ref) @@ -1055,10 +1063,8 @@ def main() -> int: # rerun helper check # FIXME: remove rerun_helper check and rely on ci cache only if check_name not in ( - # we might want to rerun reports' jobs - disable rerun check for them - JobNames.BUILD_CHECK, - JobNames.BUILD_CHECK_SPECIAL, - ): + CI.JobNames.BUILD_CHECK, + ): # we might want to rerun build report job rerun_helper = RerunHelper(commit, check_name_with_group) if rerun_helper.is_already_finished_by_status(): status = rerun_helper.get_finished_status() @@ -1071,7 +1077,7 @@ def main() -> int: # ci cache check if not previous_status and not ci_settings.no_ci_cache: ci_cache = CiCache(s3, indata["jobs_data"]["digests"]).update() - job_config = CI_CONFIG.get_job_config(check_name) + job_config = CI.get_job_config(check_name) if ci_cache.is_successful( check_name, args.batch, @@ -1111,7 +1117,7 @@ def main() -> int: ch_helper = ClickHouseHelper() check_url = "" - if CI_CONFIG.is_build_job(args.job_name): + if CI.is_build_job(args.job_name): assert ( indata ), f"--infile with config must be provided for POST action of a build type job [{args.job_name}]" @@ -1119,8 +1125,7 @@ def main() -> int: # upload binaries only for normal builds in PRs upload_binary = ( not pr_info.is_pr - or args.job_name - not in CI_CONFIG.get_builds_for_report(JobNames.BUILD_CHECK_SPECIAL) + or CI.get_job_ci_stage(args.job_name) == CI.WorkflowStages.BUILDS_1 or CiSettings.create_from_run_config(indata).upload_all ) diff --git a/tests/ci/ci_cache.py b/tests/ci/ci_cache.py index 56a84272a63..8ee0ae54385 100644 --- a/tests/ci/ci_cache.py +++ b/tests/ci/ci_cache.py @@ -5,7 +5,8 @@ from enum import Enum from pathlib import Path from typing import Dict, Optional, Any, Union, Sequence, List, Set -from ci_config import JobNames, Build, CI_CONFIG, JobConfig +from ci_config import CI + from ci_utils import is_hex, GHActions from commit_status_helper import CommitStatusData from env_helper import ( @@ -41,7 +42,7 @@ class CiCache: release - for jobs being executed on the release branch including 
master branch (not a PR branch) """ - _REQUIRED_DIGESTS = [JobNames.DOCS_CHECK, Build.PACKAGE_RELEASE] + _REQUIRED_DIGESTS = [CI.JobNames.DOCS_CHECK, CI.BuildNames.PACKAGE_RELEASE] _S3_CACHE_PREFIX = "CI_cache_v1" _CACHE_BUILD_REPORT_PREFIX = "build_report" _RECORD_FILE_EXTENSION = ".ci" @@ -80,7 +81,7 @@ class CiCache: @classmethod def is_docs_job(cls, job_name: str) -> bool: - return job_name == JobNames.DOCS_CHECK + return job_name == CI.JobNames.DOCS_CHECK @classmethod def is_srcs_job(cls, job_name: str) -> bool: @@ -105,8 +106,8 @@ class CiCache: ): self.enabled = cache_enabled self.jobs_to_skip = [] # type: List[str] - self.jobs_to_wait = {} # type: Dict[str, JobConfig] - self.jobs_to_do = {} # type: Dict[str, JobConfig] + self.jobs_to_wait = {} # type: Dict[str, CI.JobConfig] + self.jobs_to_do = {} # type: Dict[str, CI.JobConfig] self.s3 = s3 self.job_digests = job_digests self.cache_s3_paths = { @@ -127,9 +128,13 @@ class CiCache: @classmethod def calc_digests_and_create( - cls, s3: S3Helper, job_configs: Dict[str, JobConfig], cache_enabled: bool = True + cls, + s3: S3Helper, + job_configs: Dict[str, CI.JobConfig], + cache_enabled: bool = True, + dry_run: bool = False, ) -> "CiCache": - job_digester = JobDigester() + job_digester = JobDigester(dry_run=dry_run) digests = {} print("::group::Job Digests") @@ -140,9 +145,7 @@ class CiCache: for job in cls._REQUIRED_DIGESTS: if job not in job_configs: - digest = job_digester.get_job_digest( - CI_CONFIG.get_job_config(job).digest - ) + digest = job_digester.get_job_digest(CI.get_job_config(job).digest) digests[job] = digest print( f" job [{job.rjust(50)}] required for CI Cache has digest [{digest}]" @@ -154,10 +157,10 @@ class CiCache: self, job_digests: Dict[str, str], job_type: JobType ) -> str: if job_type == self.JobType.DOCS: - res = job_digests[JobNames.DOCS_CHECK] + res = job_digests[CI.JobNames.DOCS_CHECK] elif job_type == self.JobType.SRCS: - if Build.PACKAGE_RELEASE in job_digests: - res = job_digests[Build.PACKAGE_RELEASE] + if CI.BuildNames.PACKAGE_RELEASE in job_digests: + res = job_digests[CI.BuildNames.PACKAGE_RELEASE] else: assert False, "BUG, no build job in digest' list" else: @@ -648,7 +651,7 @@ class CiCache: report_path = Path(REPORT_PATH) report_path.mkdir(exist_ok=True, parents=True) path = ( - self._get_record_s3_path(Build.PACKAGE_RELEASE) + self._get_record_s3_path(CI.BuildNames.PACKAGE_RELEASE) + self._CACHE_BUILD_REPORT_PREFIX ) if file_prefix: @@ -664,13 +667,14 @@ class CiCache: def upload_build_report(self, build_result: BuildResult) -> str: result_json_path = build_result.write_json(Path(TEMP_PATH)) s3_path = ( - self._get_record_s3_path(Build.PACKAGE_RELEASE) + result_json_path.name + self._get_record_s3_path(CI.BuildNames.PACKAGE_RELEASE) + + result_json_path.name ) return self.s3.upload_file( bucket=S3_BUILDS_BUCKET, file_path=result_json_path, s3_path=s3_path ) - def await_pending_jobs(self, is_release: bool) -> None: + def await_pending_jobs(self, is_release: bool, dry_run: bool = False) -> None: """ await pending jobs to be finished @jobs_with_params - jobs to await. 
{JOB_NAME: {"batches": [BATCHES...], "num_batches": NUM_BATCHES}} @@ -687,15 +691,9 @@ class CiCache: MAX_JOB_NUM_TO_WAIT = 3 round_cnt = 0 - # FIXME: temporary experiment: lets enable await for PR' workflows awaiting on build' jobs only + # FIXME: temporary experiment: lets enable await for PR' workflows but for a shorter time if not is_release: - MAX_ROUNDS_TO_WAIT = 1 - remove_from_wait = [] - for job in self.jobs_to_wait: - if job not in Build: - remove_from_wait.append(job) - for job in remove_from_wait: - del self.jobs_to_wait[job] + MAX_ROUNDS_TO_WAIT = 3 while ( len(self.jobs_to_wait) > MAX_JOB_NUM_TO_WAIT @@ -713,11 +711,12 @@ class CiCache: start_at = int(time.time()) while expired_sec < TIMEOUT and self.jobs_to_wait: await_finished: Set[str] = set() - time.sleep(poll_interval_sec) + if not dry_run: + time.sleep(poll_interval_sec) self.update() for job_name, job_config in self.jobs_to_wait.items(): num_batches = job_config.num_batches - job_config = CI_CONFIG.get_job_config(job_name) + job_config = CI.get_job_config(job_name) assert job_config.pending_batches assert job_config.batches pending_batches = list(job_config.pending_batches) @@ -741,12 +740,11 @@ class CiCache: f"Job [{job_name}_[{batch}/{num_batches}]] is not pending anymore" ) job_config.batches.remove(batch) - job_config.pending_batches.remove(batch) else: print( f"NOTE: Job [{job_name}:{batch}] finished failed - do not add to ready" ) - job_config.pending_batches.remove(batch) + job_config.pending_batches.remove(batch) if not job_config.pending_batches: await_finished.add(job_name) @@ -754,18 +752,25 @@ class CiCache: for job in await_finished: self.jobs_to_skip.append(job) del self.jobs_to_wait[job] + del self.jobs_to_do[job] - expired_sec = int(time.time()) - start_at - print( - f"...awaiting continues... seconds left [{TIMEOUT - expired_sec}]" - ) + if not dry_run: + expired_sec = int(time.time()) - start_at + print( + f"...awaiting continues... 
seconds left [{TIMEOUT - expired_sec}]" + ) + else: + # make up for 2 iterations in dry_run + expired_sec += int(TIMEOUT / 2) + 1 GHActions.print_in_group( "Remaining jobs:", [list(self.jobs_to_wait)], ) - def apply(self, job_configs: Dict[str, JobConfig], is_release: bool) -> "CiCache": + def apply( + self, job_configs: Dict[str, CI.JobConfig], is_release: bool + ) -> "CiCache": if not self.enabled: self.jobs_to_do = job_configs return self diff --git a/tests/ci/ci_config.py b/tests/ci/ci_config.py index 6ab1eb8bac4..60c6a60af1a 100644 --- a/tests/ci/ci_config.py +++ b/tests/ci/ci_config.py @@ -1,749 +1,619 @@ -#!/usr/bin/env python3 - -import logging import random import re from argparse import ArgumentDefaultsHelpFormatter, ArgumentParser -from copy import deepcopy -from dataclasses import dataclass, field -from pathlib import Path -from typing import Callable, Dict, Iterable, List, Literal, Optional, Union +from typing import Dict, Optional, List -from ci_utils import WithIter, normalize_string -from integration_test_images import IMAGES +from ci_utils import normalize_string +from ci_definitions import * -class WorkFlows(metaclass=WithIter): - PULL_REQUEST = "PULL_REQUEST" - MASTER = "MASTER" - BACKPORT = "BACKPORT" - RELEASE = "RELEASE" - SYNC = "SYNC" - - -class CIStages(metaclass=WithIter): - NA = "UNKNOWN" - BUILDS_1 = "Builds_1" - BUILDS_2 = "Builds_2" - TESTS_1 = "Tests_1" - TESTS_2 = "Tests_2" - TESTS_3 = "Tests_3" - - -class Runners(metaclass=WithIter): - BUILDER = "builder" - STYLE_CHECKER = "style-checker" - STYLE_CHECKER_ARM = "style-checker-aarch64" - FUNC_TESTER = "func-tester" - FUNC_TESTER_ARM = "func-tester-aarch64" - STRESS_TESTER = "stress-tester" - FUZZER_UNIT_TESTER = "fuzzer-unit-tester" - - -class CILabels(metaclass=WithIter): - """ - Label names or commit tokens in normalized form - """ - - DO_NOT_TEST_LABEL = "do_not_test" - NO_MERGE_COMMIT = "no_merge_commit" - NO_CI_CACHE = "no_ci_cache" - # to upload all binaries from build jobs - UPLOAD_ALL_ARTIFACTS = "upload_all" - CI_SET_SYNC = "ci_set_sync" - CI_SET_ARM = "ci_set_arm" - CI_SET_REQUIRED = "ci_set_required" - CI_SET_NORMAL_BUILDS = "ci_set_normal_builds" - CI_SET_SPECIAL_BUILDS = "ci_set_special_builds" - CI_SET_NON_REQUIRED = "ci_set_non_required" - CI_SET_OLD_ANALYZER = "ci_set_old_analyzer" - - libFuzzer = "libFuzzer" - - -class Build(metaclass=WithIter): - PACKAGE_RELEASE = "package_release" - PACKAGE_AARCH64 = "package_aarch64" - PACKAGE_ASAN = "package_asan" - PACKAGE_UBSAN = "package_ubsan" - PACKAGE_TSAN = "package_tsan" - PACKAGE_MSAN = "package_msan" - PACKAGE_DEBUG = "package_debug" - PACKAGE_RELEASE_COVERAGE = "package_release_coverage" - BINARY_RELEASE = "binary_release" - BINARY_TIDY = "binary_tidy" - BINARY_DARWIN = "binary_darwin" - BINARY_AARCH64 = "binary_aarch64" - BINARY_AARCH64_V80COMPAT = "binary_aarch64_v80compat" - BINARY_FREEBSD = "binary_freebsd" - BINARY_DARWIN_AARCH64 = "binary_darwin_aarch64" - BINARY_PPC64LE = "binary_ppc64le" - BINARY_AMD64_COMPAT = "binary_amd64_compat" - BINARY_AMD64_MUSL = "binary_amd64_musl" - BINARY_RISCV64 = "binary_riscv64" - BINARY_S390X = "binary_s390x" - BINARY_LOONGARCH64 = "binary_loongarch64" - FUZZERS = "fuzzers" - - -class JobNames(metaclass=WithIter): - STYLE_CHECK = "Style check" - FAST_TEST = "Fast test" - DOCKER_SERVER = "Docker server image" - DOCKER_KEEPER = "Docker keeper image" - INSTALL_TEST_AMD = "Install packages (amd64)" - INSTALL_TEST_ARM = "Install packages (arm64)" - - STATELESS_TEST_DEBUG = "Stateless tests (debug)" - 
STATELESS_TEST_RELEASE = "Stateless tests (release)" - STATELESS_TEST_RELEASE_COVERAGE = "Stateless tests (coverage)" - STATELESS_TEST_AARCH64 = "Stateless tests (aarch64)" - STATELESS_TEST_ASAN = "Stateless tests (asan)" - STATELESS_TEST_TSAN = "Stateless tests (tsan)" - STATELESS_TEST_MSAN = "Stateless tests (msan)" - STATELESS_TEST_UBSAN = "Stateless tests (ubsan)" - STATELESS_TEST_OLD_ANALYZER_S3_REPLICATED_RELEASE = ( - "Stateless tests (release, old analyzer, s3, DatabaseReplicated)" - ) - # merged into STATELESS_TEST_OLD_ANALYZER_S3_REPLICATED_RELEASE: - # STATELESS_TEST_OLD_ANALYZER_RELEASE = "Stateless tests (release, analyzer)" - # STATELESS_TEST_DB_REPL_RELEASE = "Stateless tests (release, DatabaseReplicated)" - # STATELESS_TEST_S3_RELEASE = "Stateless tests (release, s3 storage)" - STATELESS_TEST_S3_DEBUG = "Stateless tests (debug, s3 storage)" - STATELESS_TEST_S3_TSAN = "Stateless tests (tsan, s3 storage)" - STATELESS_TEST_AZURE_ASAN = "Stateless tests (azure, asan)" - STATELESS_TEST_FLAKY_ASAN = "Stateless tests flaky check (asan)" - - STATEFUL_TEST_DEBUG = "Stateful tests (debug)" - STATEFUL_TEST_RELEASE = "Stateful tests (release)" - STATEFUL_TEST_RELEASE_COVERAGE = "Stateful tests (coverage)" - STATEFUL_TEST_AARCH64 = "Stateful tests (aarch64)" - STATEFUL_TEST_ASAN = "Stateful tests (asan)" - STATEFUL_TEST_TSAN = "Stateful tests (tsan)" - STATEFUL_TEST_MSAN = "Stateful tests (msan)" - STATEFUL_TEST_UBSAN = "Stateful tests (ubsan)" - STATEFUL_TEST_PARALLEL_REPL_RELEASE = "Stateful tests (release, ParallelReplicas)" - STATEFUL_TEST_PARALLEL_REPL_DEBUG = "Stateful tests (debug, ParallelReplicas)" - STATEFUL_TEST_PARALLEL_REPL_ASAN = "Stateful tests (asan, ParallelReplicas)" - STATEFUL_TEST_PARALLEL_REPL_MSAN = "Stateful tests (msan, ParallelReplicas)" - STATEFUL_TEST_PARALLEL_REPL_UBSAN = "Stateful tests (ubsan, ParallelReplicas)" - STATEFUL_TEST_PARALLEL_REPL_TSAN = "Stateful tests (tsan, ParallelReplicas)" - - STRESS_TEST_ASAN = "Stress test (asan)" - STRESS_TEST_TSAN = "Stress test (tsan)" - STRESS_TEST_UBSAN = "Stress test (ubsan)" - STRESS_TEST_MSAN = "Stress test (msan)" - STRESS_TEST_DEBUG = "Stress test (debug)" - STRESS_TEST_AZURE_TSAN = "Stress test (azure, tsan)" - STRESS_TEST_AZURE_MSAN = "Stress test (azure, msan)" - - INTEGRATION_TEST = "Integration tests (release)" - INTEGRATION_TEST_ASAN = "Integration tests (asan)" - INTEGRATION_TEST_ASAN_OLD_ANALYZER = "Integration tests (asan, old analyzer)" - INTEGRATION_TEST_TSAN = "Integration tests (tsan)" - INTEGRATION_TEST_ARM = "Integration tests (aarch64)" - INTEGRATION_TEST_FLAKY = "Integration tests flaky check (asan)" - - UPGRADE_TEST_DEBUG = "Upgrade check (debug)" - UPGRADE_TEST_ASAN = "Upgrade check (asan)" - UPGRADE_TEST_TSAN = "Upgrade check (tsan)" - UPGRADE_TEST_MSAN = "Upgrade check (msan)" - - UNIT_TEST = "Unit tests (release)" - UNIT_TEST_ASAN = "Unit tests (asan)" - UNIT_TEST_MSAN = "Unit tests (msan)" - UNIT_TEST_TSAN = "Unit tests (tsan)" - UNIT_TEST_UBSAN = "Unit tests (ubsan)" - - AST_FUZZER_TEST_DEBUG = "AST fuzzer (debug)" - AST_FUZZER_TEST_ASAN = "AST fuzzer (asan)" - AST_FUZZER_TEST_MSAN = "AST fuzzer (msan)" - AST_FUZZER_TEST_TSAN = "AST fuzzer (tsan)" - AST_FUZZER_TEST_UBSAN = "AST fuzzer (ubsan)" - - JEPSEN_KEEPER = "ClickHouse Keeper Jepsen" - JEPSEN_SERVER = "ClickHouse Server Jepsen" - - PERFORMANCE_TEST_AMD64 = "Performance Comparison" - PERFORMANCE_TEST_ARM64 = "Performance Comparison Aarch64" - - SQL_LOGIC_TEST = "Sqllogic test (release)" - - SQLANCER = "SQLancer (release)" - 
SQLANCER_DEBUG = "SQLancer (debug)" - SQLTEST = "SQLTest" - - COMPATIBILITY_TEST = "Compatibility check (amd64)" - COMPATIBILITY_TEST_ARM = "Compatibility check (aarch64)" - - CLICKBENCH_TEST = "ClickBench (amd64)" - CLICKBENCH_TEST_ARM = "ClickBench (aarch64)" - - LIBFUZZER_TEST = "libFuzzer tests" - - BUILD_CHECK = "ClickHouse build check" - BUILD_CHECK_SPECIAL = "ClickHouse special build check" - - DOCS_CHECK = "Docs check" - BUGFIX_VALIDATE = "Bugfix validation" - - -class StatusNames(metaclass=WithIter): - "Class with statuses that aren't related to particular jobs" - CI = "CI running" - MERGEABLE = "Mergeable Check" - SYNC = "A Sync" - - -# dynamically update JobName with Build jobs -for attr_name in dir(Build): - if not attr_name.startswith("__") and not callable(getattr(Build, attr_name)): - setattr(JobNames, attr_name, getattr(Build, attr_name)) - - -@dataclass -class DigestConfig: - # all files, dirs to include into digest, glob supported - include_paths: List[Union[str, Path]] = field(default_factory=list) - # file suffixes to exclude from digest - exclude_files: List[str] = field(default_factory=list) - # directories to exclude from digest - exclude_dirs: List[Union[str, Path]] = field(default_factory=list) - # docker names to include into digest - docker: List[str] = field(default_factory=list) - # git submodules digest - git_submodules: bool = False - - -@dataclass -class LabelConfig: - """ - configures different CI scenarios per GH label - """ - - run_jobs: Iterable[str] = frozenset() - - -@dataclass -class JobConfig: - """ - contains config parameters for job execution in CI workflow - """ - - # configures digest calculation for the job - digest: DigestConfig = field(default_factory=DigestConfig) - # will be triggered for the job if omitted in CI workflow yml - run_command: str = "" - # job timeout, seconds - timeout: Optional[int] = None - # sets number of batches for a multi-batch job - num_batches: int = 1 - # label that enables job in CI, if set digest isn't used - run_by_label: str = "" - # to run always regardless of the job digest or/and label - run_always: bool = False - # if the job needs to be run on the release branch, including master (building packages, docker server). - # NOTE: Subsequent runs on the same branch with the similar digest are still considered skip-able. - required_on_release_branch: bool = False - # job is for pr workflow only - pr_only: bool = False - # job is for release/master branches only - release_only: bool = False - # to randomly pick and run one job among jobs in the same @random_bucket (PR branches only). - random_bucket: str = "" - # Do not set it. A list of batches to run. It will be set in runtime in accordance with ci cache and ci settings - batches: Optional[List[int]] = None - # Do not set it. A list of batches to await. 
It will be set in runtime in accordance with ci cache and ci settings - pending_batches: Optional[List[int]] = None - - -builds_job_config = JobConfig( - required_on_release_branch=True, - digest=DigestConfig( - include_paths=[ - "./src", - "./contrib/*-cmake", - "./contrib/consistent-hashing", - "./contrib/murmurhash", - "./contrib/libfarmhash", - "./contrib/pdqsort", - "./contrib/cityhash102", - "./contrib/sparse-checkout", - "./contrib/libmetrohash", - "./contrib/update-submodules.sh", - "./contrib/CMakeLists.txt", - "./CMakeLists.txt", - "./PreLoad.cmake", - "./cmake", - "./base", - "./programs", - "./packages", - "./docker/packager/packager", - "./rust", - "./tests/ci/version_helper.py", - # FIXME: This is a WA to rebuild the CH and recreate the Performance.tar.zst artifact - # when there are changes in performance test scripts. - # Due to the current design of the perf test we need to rebuild CH when the performance test changes, - # otherwise the changes will not be visible in the PerformanceTest job in CI - "./tests/performance", - ], - exclude_files=[".md"], - docker=["clickhouse/binary-builder"], - git_submodules=True, - ), - run_command="build_check.py $BUILD_NAME", -) -fuzzer_build_job_config = deepcopy(builds_job_config) -fuzzer_build_job_config.run_by_label = CILabels.libFuzzer - - -@dataclass -class BuildConfig: - name: str - compiler: str - package_type: Literal["deb", "binary", "fuzzers"] - additional_pkgs: bool = False - debug_build: bool = False - coverage: bool = False - sanitizer: str = "" - tidy: bool = False - # sparse_checkout is needed only to test the option itself. - # No particular sense to use it in every build, since it slows down the job. - sparse_checkout: bool = False - comment: str = "" - static_binary_name: str = "" - job_config: JobConfig = field(default_factory=lambda: deepcopy(builds_job_config)) - - def export_env(self, export: bool = False) -> str: - def process(field_name: str, field: Union[bool, str]) -> str: - if isinstance(field, bool): - field = str(field).lower() - elif not isinstance(field, str): - field = "" - if export: - return f"export BUILD_{field_name.upper()}={repr(field)}" - return f"BUILD_{field_name.upper()}={field}" - - return "\n".join(process(k, v) for k, v in self.__dict__.items()) - - -@dataclass -class BuildReportConfig: - builds: List[str] - job_config: JobConfig = field( - default_factory=lambda: JobConfig( - run_command='build_report_check.py "$CHECK_NAME"', - digest=DigestConfig( - include_paths=[ - "./tests/ci/build_report_check.py", - "./tests/ci/upload_result_helper.py", - ], - ), - ) - ) - - -@dataclass -class TestConfig: - required_build: str - job_config: JobConfig = field(default_factory=JobConfig) - - -BuildConfigs = Dict[str, BuildConfig] -BuildsReportConfig = Dict[str, BuildReportConfig] -TestConfigs = Dict[str, TestConfig] -LabelConfigs = Dict[str, LabelConfig] - -# common digests configs -compatibility_check_digest = DigestConfig( - include_paths=["./tests/ci/compatibility_check.py"], - docker=["clickhouse/test-old-ubuntu", "clickhouse/test-old-centos"], -) -install_check_digest = DigestConfig( - include_paths=["./tests/ci/install_check.py"], - docker=["clickhouse/install-deb-test", "clickhouse/install-rpm-test"], -) -stateless_check_digest = DigestConfig( - include_paths=[ - "./tests/ci/functional_test_check.py", - "./tests/queries/0_stateless/", - "./tests/clickhouse-test", - "./tests/config", - "./tests/*.txt", - ], - exclude_files=[".md"], - docker=["clickhouse/stateless-test"], -) -stateful_check_digest = 
DigestConfig( - include_paths=[ - "./tests/ci/functional_test_check.py", - "./tests/queries/1_stateful/", - "./tests/clickhouse-test", - "./tests/config", - "./tests/*.txt", - ], - exclude_files=[".md"], - docker=["clickhouse/stateful-test"], -) - -stress_check_digest = DigestConfig( - include_paths=[ - "./tests/queries/0_stateless/", - "./tests/queries/1_stateful/", - "./tests/clickhouse-test", - "./tests/config", - "./tests/*.txt", - ], - exclude_files=[".md"], - docker=["clickhouse/stress-test"], -) -# FIXME: which tests are upgrade? just python? -upgrade_check_digest = DigestConfig( - include_paths=["./tests/ci/upgrade_check.py"], - exclude_files=[".md"], - docker=["clickhouse/upgrade-check"], -) -integration_check_digest = DigestConfig( - include_paths=[ - "./tests/ci/integration_test_check.py", - "./tests/ci/integration_tests_runner.py", - "./tests/integration/", - ], - exclude_files=[".md"], - docker=IMAGES.copy(), -) - -ast_fuzzer_check_digest = DigestConfig( - # include_paths=["./tests/ci/ast_fuzzer_check.py"], - # exclude_files=[".md"], - # docker=["clickhouse/fuzzer"], -) -unit_check_digest = DigestConfig( - include_paths=["./tests/ci/unit_tests_check.py"], - exclude_files=[".md"], - docker=["clickhouse/unit-test"], -) -perf_check_digest = DigestConfig( - include_paths=[ - "./tests/ci/performance_comparison_check.py", - "./tests/performance/", - ], - exclude_files=[".md"], - docker=["clickhouse/performance-comparison"], -) -sqllancer_check_digest = DigestConfig( - # include_paths=["./tests/ci/sqlancer_check.py"], - # exclude_files=[".md"], - # docker=["clickhouse/sqlancer-test"], -) -sqllogic_check_digest = DigestConfig( - include_paths=["./tests/ci/sqllogic_test.py"], - exclude_files=[".md"], - docker=["clickhouse/sqllogic-test"], -) -sqltest_check_digest = DigestConfig( - include_paths=["./tests/ci/sqltest.py"], - exclude_files=[".md"], - docker=["clickhouse/sqltest"], -) -bugfix_validate_check = DigestConfig( - include_paths=[ - "./tests/queries/0_stateless/", - "./tests/ci/integration_test_check.py", - "./tests/ci/functional_test_check.py", - "./tests/ci/bugfix_validate_check.py", - ], - exclude_files=[".md"], - docker=IMAGES.copy() - + [ - "clickhouse/stateless-test", - ], -) -# common test params -docker_server_job_config = JobConfig( - required_on_release_branch=True, - run_command='docker_server.py --check-name "$CHECK_NAME" --release-type head --allow-build-reuse', - digest=DigestConfig( - include_paths=[ - "tests/ci/docker_server.py", - "./docker/server", - ] - ), -) -compatibility_test_common_params = { - "digest": compatibility_check_digest, - "run_command": "compatibility_check.py", -} -stateless_test_common_params = { - "digest": stateless_check_digest, - "run_command": 'functional_test_check.py "$CHECK_NAME"', - "timeout": 10800, -} -stateful_test_common_params = { - "digest": stateful_check_digest, - "run_command": 'functional_test_check.py "$CHECK_NAME"', - "timeout": 3600, -} -stress_test_common_params = { - "digest": stress_check_digest, - "run_command": "stress_check.py", - "timeout": 9000, -} -upgrade_test_common_params = { - "digest": upgrade_check_digest, - "run_command": "upgrade_check.py", -} -astfuzzer_test_common_params = { - "digest": ast_fuzzer_check_digest, - "run_command": "ast_fuzzer_check.py", - "run_always": True, -} -integration_test_common_params = { - "digest": integration_check_digest, - "run_command": 'integration_test_check.py "$CHECK_NAME"', -} -unit_test_common_params = { - "digest": unit_check_digest, - "run_command": 
"unit_tests_check.py", -} -perf_test_common_params = { - "digest": perf_check_digest, - "run_command": "performance_comparison_check.py", -} -sqllancer_test_common_params = JobConfig( - digest=sqllancer_check_digest, - run_command="sqlancer_check.py", - release_only=True, - run_always=True, -) -sqllogic_test_params = JobConfig( - digest=sqllogic_check_digest, - run_command="sqllogic_test.py", - timeout=10800, - release_only=True, -) -sql_test_params = JobConfig( - digest=sqltest_check_digest, - run_command="sqltest.py", - timeout=10800, - release_only=True, -) -clickbench_test_params = { - "digest": DigestConfig( - include_paths=[ - "tests/ci/clickbench.py", - ], - docker=["clickhouse/clickbench"], - ), - "run_command": 'clickbench.py "$CHECK_NAME"', - "timeout": 900, -} -install_test_params = JobConfig( - digest=install_check_digest, - run_command='install_check.py "$CHECK_NAME"', - timeout=900, -) - - -@dataclass -class CIConfig: +class CI: """ Contains configs for all jobs in the CI pipeline each config item in the below dicts should be an instance of JobConfig class or inherited from it """ - build_config: BuildConfigs - builds_report_config: BuildsReportConfig - test_configs: TestConfigs - other_jobs_configs: TestConfigs - label_configs: LabelConfigs + # reimport types to CI class so that they visible as CI.* and mypy is happy + # pylint:disable=useless-import-alias,reimported,import-outside-toplevel + from ci_definitions import BuildConfig as BuildConfig + from ci_definitions import DigestConfig as DigestConfig + from ci_definitions import JobConfig as JobConfig + from ci_definitions import CheckDescription as CheckDescription + from ci_definitions import Tags as Tags + from ci_definitions import JobNames as JobNames + from ci_definitions import BuildNames as BuildNames + from ci_definitions import StatusNames as StatusNames + from ci_definitions import CHECK_DESCRIPTIONS as CHECK_DESCRIPTIONS + from ci_definitions import REQUIRED_CHECKS as REQUIRED_CHECKS + from ci_definitions import SyncState as SyncState + from ci_definitions import MQ_JOBS as MQ_JOBS + from ci_definitions import WorkflowStages as WorkflowStages + from ci_definitions import Runners as Runners # Jobs that run for doc related updates _DOCS_CHECK_JOBS = [JobNames.DOCS_CHECK, JobNames.STYLE_CHECK] - # Jobs that run in Merge Queue if it's enabled - _MQ_JOBS = [ - JobNames.STYLE_CHECK, - JobNames.FAST_TEST, - Build.BINARY_RELEASE, - JobNames.UNIT_TEST, - ] + TAG_CONFIGS = { + Tags.DO_NOT_TEST_LABEL: LabelConfig(run_jobs=[JobNames.STYLE_CHECK]), + Tags.CI_SET_ARM: LabelConfig( + run_jobs=[ + JobNames.STYLE_CHECK, + BuildNames.PACKAGE_AARCH64, + JobNames.INTEGRATION_TEST_ARM, + ] + ), + Tags.CI_SET_REQUIRED: LabelConfig(run_jobs=REQUIRED_CHECKS), + Tags.CI_SET_BUILDS: LabelConfig( + run_jobs=[JobNames.STYLE_CHECK, JobNames.BUILD_CHECK] + + [build for build in BuildNames if build != BuildNames.FUZZERS] + ), + Tags.CI_SET_NON_REQUIRED: LabelConfig( + run_jobs=[job for job in JobNames if job not in REQUIRED_CHECKS] + ), + Tags.CI_SET_OLD_ANALYZER: LabelConfig( + run_jobs=[ + JobNames.STYLE_CHECK, + JobNames.FAST_TEST, + BuildNames.PACKAGE_RELEASE, + BuildNames.PACKAGE_ASAN, + JobNames.STATELESS_TEST_OLD_ANALYZER_S3_REPLICATED_RELEASE, + JobNames.INTEGRATION_TEST_ASAN_OLD_ANALYZER, + ] + ), + Tags.CI_SET_SYNC: LabelConfig( + run_jobs=[ + BuildNames.PACKAGE_ASAN, + JobNames.STYLE_CHECK, + JobNames.BUILD_CHECK, + JobNames.UNIT_TEST_ASAN, + JobNames.STATEFUL_TEST_ASAN, + ] + ), + } - def get_label_config(self, label_name: str) 
-> Optional[LabelConfig]: - for label, config in self.label_configs.items(): + JOB_CONFIGS: Dict[str, JobConfig] = { + BuildNames.PACKAGE_RELEASE: CommonJobConfigs.BUILD.with_properties( + build_config=BuildConfig( + name=BuildNames.PACKAGE_RELEASE, + compiler="clang-18", + package_type="deb", + static_binary_name="amd64", + additional_pkgs=True, + ) + ), + BuildNames.PACKAGE_AARCH64: CommonJobConfigs.BUILD.with_properties( + build_config=BuildConfig( + name=BuildNames.PACKAGE_AARCH64, + compiler="clang-18-aarch64", + package_type="deb", + static_binary_name="aarch64", + additional_pkgs=True, + ) + ), + BuildNames.PACKAGE_ASAN: CommonJobConfigs.BUILD.with_properties( + build_config=BuildConfig( + name=BuildNames.PACKAGE_ASAN, + compiler="clang-18", + sanitizer="address", + package_type="deb", + ), + ), + BuildNames.PACKAGE_UBSAN: CommonJobConfigs.BUILD.with_properties( + build_config=BuildConfig( + name=BuildNames.PACKAGE_UBSAN, + compiler="clang-18", + sanitizer="undefined", + package_type="deb", + ), + ), + BuildNames.PACKAGE_TSAN: CommonJobConfigs.BUILD.with_properties( + build_config=BuildConfig( + name=BuildNames.PACKAGE_TSAN, + compiler="clang-18", + sanitizer="thread", + package_type="deb", + ), + ), + BuildNames.PACKAGE_MSAN: CommonJobConfigs.BUILD.with_properties( + build_config=BuildConfig( + name=BuildNames.PACKAGE_MSAN, + compiler="clang-18", + sanitizer="memory", + package_type="deb", + ), + ), + BuildNames.PACKAGE_DEBUG: CommonJobConfigs.BUILD.with_properties( + build_config=BuildConfig( + name=BuildNames.PACKAGE_DEBUG, + compiler="clang-18", + debug_build=True, + package_type="deb", + sparse_checkout=True, # Check that it works with at least one build, see also update-submodules.sh + ), + ), + BuildNames.PACKAGE_RELEASE_COVERAGE: CommonJobConfigs.BUILD.with_properties( + build_config=BuildConfig( + name=BuildNames.PACKAGE_RELEASE_COVERAGE, + compiler="clang-18", + coverage=True, + package_type="deb", + ), + ), + BuildNames.BINARY_RELEASE: CommonJobConfigs.BUILD.with_properties( + build_config=BuildConfig( + name=BuildNames.BINARY_RELEASE, + compiler="clang-18", + package_type="binary", + ), + ), + BuildNames.BINARY_TIDY: CommonJobConfigs.BUILD.with_properties( + build_config=BuildConfig( + name=BuildNames.BINARY_TIDY, + compiler="clang-18", + debug_build=True, + package_type="binary", + static_binary_name="debug-amd64", + tidy=True, + comment="clang-tidy is used for static analysis", + ), + ), + BuildNames.BINARY_DARWIN: CommonJobConfigs.BUILD.with_properties( + build_config=BuildConfig( + name=BuildNames.BINARY_DARWIN, + compiler="clang-18-darwin", + package_type="binary", + static_binary_name="macos", + ), + ), + BuildNames.BINARY_AARCH64: CommonJobConfigs.BUILD.with_properties( + build_config=BuildConfig( + name=BuildNames.BINARY_AARCH64, + compiler="clang-18-aarch64", + package_type="binary", + ), + ), + BuildNames.BINARY_AARCH64_V80COMPAT: CommonJobConfigs.BUILD.with_properties( + build_config=BuildConfig( + name=BuildNames.BINARY_AARCH64_V80COMPAT, + compiler="clang-18-aarch64-v80compat", + package_type="binary", + static_binary_name="aarch64v80compat", + comment="For ARMv8.1 and older", + ), + ), + BuildNames.BINARY_FREEBSD: CommonJobConfigs.BUILD.with_properties( + build_config=BuildConfig( + name=BuildNames.BINARY_FREEBSD, + compiler="clang-18-freebsd", + package_type="binary", + static_binary_name="freebsd", + ), + ), + BuildNames.BINARY_DARWIN_AARCH64: CommonJobConfigs.BUILD.with_properties( + build_config=BuildConfig( + name=BuildNames.BINARY_DARWIN_AARCH64, + 
compiler="clang-18-darwin-aarch64", + package_type="binary", + static_binary_name="macos-aarch64", + ), + ), + BuildNames.BINARY_PPC64LE: CommonJobConfigs.BUILD.with_properties( + build_config=BuildConfig( + name=BuildNames.BINARY_PPC64LE, + compiler="clang-18-ppc64le", + package_type="binary", + static_binary_name="powerpc64le", + ), + ), + BuildNames.BINARY_AMD64_COMPAT: CommonJobConfigs.BUILD.with_properties( + build_config=BuildConfig( + name=BuildNames.BINARY_AMD64_COMPAT, + compiler="clang-18-amd64-compat", + package_type="binary", + static_binary_name="amd64compat", + comment="SSE2-only build", + ), + ), + BuildNames.BINARY_AMD64_MUSL: CommonJobConfigs.BUILD.with_properties( + build_config=BuildConfig( + name=BuildNames.BINARY_AMD64_MUSL, + compiler="clang-18-amd64-musl", + package_type="binary", + static_binary_name="amd64musl", + comment="Build with Musl", + ), + ), + BuildNames.BINARY_RISCV64: CommonJobConfigs.BUILD.with_properties( + build_config=BuildConfig( + name=BuildNames.BINARY_RISCV64, + compiler="clang-18-riscv64", + package_type="binary", + static_binary_name="riscv64", + ), + ), + BuildNames.BINARY_S390X: CommonJobConfigs.BUILD.with_properties( + build_config=BuildConfig( + name=BuildNames.BINARY_S390X, + compiler="clang-18-s390x", + package_type="binary", + static_binary_name="s390x", + ), + ), + BuildNames.BINARY_LOONGARCH64: CommonJobConfigs.BUILD.with_properties( + build_config=BuildConfig( + name=BuildNames.BINARY_LOONGARCH64, + compiler="clang-18-loongarch64", + package_type="binary", + static_binary_name="loongarch64", + ), + ), + BuildNames.FUZZERS: CommonJobConfigs.BUILD.with_properties( + build_config=BuildConfig( + name=BuildNames.FUZZERS, + compiler="clang-18", + package_type="fuzzers", + ), + run_by_label=Tags.libFuzzer, + ), + JobNames.BUILD_CHECK: CommonJobConfigs.BUILD_REPORT.with_properties(), + JobNames.INSTALL_TEST_AMD: CommonJobConfigs.INSTALL_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_RELEASE] + ), + JobNames.INSTALL_TEST_ARM: CommonJobConfigs.INSTALL_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_AARCH64], + runner_type=Runners.STYLE_CHECKER_ARM, + ), + JobNames.STATEFUL_TEST_ASAN: CommonJobConfigs.STATEFUL_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_ASAN] + ), + JobNames.STATEFUL_TEST_TSAN: CommonJobConfigs.STATEFUL_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_TSAN] + ), + JobNames.STATEFUL_TEST_MSAN: CommonJobConfigs.STATEFUL_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_MSAN] + ), + JobNames.STATEFUL_TEST_UBSAN: CommonJobConfigs.STATEFUL_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_UBSAN] + ), + JobNames.STATEFUL_TEST_DEBUG: CommonJobConfigs.STATEFUL_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_DEBUG] + ), + JobNames.STATEFUL_TEST_RELEASE: CommonJobConfigs.STATEFUL_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_RELEASE] + ), + JobNames.STATEFUL_TEST_RELEASE_COVERAGE: CommonJobConfigs.STATEFUL_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_RELEASE_COVERAGE] + ), + JobNames.STATEFUL_TEST_AARCH64: CommonJobConfigs.STATEFUL_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_AARCH64], + runner_type=Runners.FUNC_TESTER_ARM, + ), + JobNames.STATEFUL_TEST_PARALLEL_REPL_RELEASE: CommonJobConfigs.STATEFUL_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_RELEASE] + ), + JobNames.STATEFUL_TEST_PARALLEL_REPL_DEBUG: CommonJobConfigs.STATEFUL_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_DEBUG] + 
), + JobNames.STATEFUL_TEST_PARALLEL_REPL_ASAN: CommonJobConfigs.STATEFUL_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_ASAN], + random_bucket="parrepl_with_sanitizer", + ), + JobNames.STATEFUL_TEST_PARALLEL_REPL_MSAN: CommonJobConfigs.STATEFUL_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_MSAN], + random_bucket="parrepl_with_sanitizer", + ), + JobNames.STATEFUL_TEST_PARALLEL_REPL_UBSAN: CommonJobConfigs.STATEFUL_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_UBSAN], + random_bucket="parrepl_with_sanitizer", + ), + JobNames.STATEFUL_TEST_PARALLEL_REPL_TSAN: CommonJobConfigs.STATEFUL_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_TSAN], + random_bucket="parrepl_with_sanitizer", + ), + JobNames.STATELESS_TEST_ASAN: CommonJobConfigs.STATELESS_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_ASAN], num_batches=4 + ), + JobNames.STATELESS_TEST_TSAN: CommonJobConfigs.STATELESS_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_TSAN], num_batches=5 + ), + JobNames.STATELESS_TEST_MSAN: CommonJobConfigs.STATELESS_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_MSAN], num_batches=6 + ), + JobNames.STATELESS_TEST_UBSAN: CommonJobConfigs.STATELESS_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_UBSAN], num_batches=2 + ), + JobNames.STATELESS_TEST_DEBUG: CommonJobConfigs.STATELESS_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_DEBUG], num_batches=5 + ), + JobNames.STATELESS_TEST_RELEASE: CommonJobConfigs.STATELESS_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_RELEASE], + ), + JobNames.STATELESS_TEST_RELEASE_COVERAGE: CommonJobConfigs.STATELESS_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_RELEASE_COVERAGE], num_batches=6 + ), + JobNames.STATELESS_TEST_AARCH64: CommonJobConfigs.STATELESS_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_AARCH64], + runner_type=Runners.FUNC_TESTER_ARM, + ), + JobNames.STATELESS_TEST_OLD_ANALYZER_S3_REPLICATED_RELEASE: CommonJobConfigs.STATELESS_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_RELEASE], num_batches=4 + ), + JobNames.STATELESS_TEST_S3_DEBUG: CommonJobConfigs.STATELESS_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_DEBUG], num_batches=6 + ), + JobNames.STATELESS_TEST_AZURE_ASAN: CommonJobConfigs.STATELESS_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_ASAN], num_batches=4, release_only=True + ), + JobNames.STATELESS_TEST_S3_TSAN: CommonJobConfigs.STATELESS_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_TSAN], + num_batches=5, + ), + JobNames.STRESS_TEST_DEBUG: CommonJobConfigs.STRESS_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_DEBUG], + ), + JobNames.STRESS_TEST_TSAN: CommonJobConfigs.STRESS_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_TSAN], + ), + JobNames.STRESS_TEST_ASAN: CommonJobConfigs.STRESS_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_ASAN], + random_bucket="stress_with_sanitizer", + ), + JobNames.STRESS_TEST_UBSAN: CommonJobConfigs.STRESS_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_UBSAN], + random_bucket="stress_with_sanitizer", + ), + JobNames.STRESS_TEST_MSAN: CommonJobConfigs.STRESS_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_MSAN], + random_bucket="stress_with_sanitizer", + ), + JobNames.STRESS_TEST_AZURE_TSAN: CommonJobConfigs.STRESS_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_TSAN], release_only=True + ), + JobNames.STRESS_TEST_AZURE_MSAN: 
CommonJobConfigs.STRESS_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_MSAN], release_only=True + ), + JobNames.UPGRADE_TEST_ASAN: CommonJobConfigs.UPGRADE_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_ASAN], + random_bucket="upgrade_with_sanitizer", + pr_only=True, + ), + JobNames.UPGRADE_TEST_TSAN: CommonJobConfigs.UPGRADE_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_TSAN], + random_bucket="upgrade_with_sanitizer", + pr_only=True, + ), + JobNames.UPGRADE_TEST_MSAN: CommonJobConfigs.UPGRADE_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_MSAN], + random_bucket="upgrade_with_sanitizer", + pr_only=True, + ), + JobNames.UPGRADE_TEST_DEBUG: CommonJobConfigs.UPGRADE_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_DEBUG], pr_only=True + ), + JobNames.INTEGRATION_TEST_ASAN: CommonJobConfigs.INTEGRATION_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_ASAN], release_only=True, num_batches=4 + ), + JobNames.INTEGRATION_TEST_ASAN_OLD_ANALYZER: CommonJobConfigs.INTEGRATION_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_ASAN], num_batches=6 + ), + JobNames.INTEGRATION_TEST_TSAN: CommonJobConfigs.INTEGRATION_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_TSAN], num_batches=6 + ), + JobNames.INTEGRATION_TEST_ARM: CommonJobConfigs.INTEGRATION_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_AARCH64], + num_batches=6, + runner_type=Runners.FUNC_TESTER_ARM, + ), + JobNames.INTEGRATION_TEST: CommonJobConfigs.INTEGRATION_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_RELEASE], + num_batches=4, + release_only=True, + ), + JobNames.INTEGRATION_TEST_FLAKY: CommonJobConfigs.INTEGRATION_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_ASAN], pr_only=True + ), + JobNames.COMPATIBILITY_TEST: CommonJobConfigs.COMPATIBILITY_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_RELEASE], + required_on_release_branch=True, + ), + JobNames.COMPATIBILITY_TEST_ARM: CommonJobConfigs.COMPATIBILITY_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_AARCH64], + required_on_release_branch=True, + runner_type=Runners.STYLE_CHECKER_ARM, + ), + JobNames.UNIT_TEST: CommonJobConfigs.UNIT_TEST.with_properties( + required_builds=[BuildNames.BINARY_RELEASE], + ), + JobNames.UNIT_TEST_ASAN: CommonJobConfigs.UNIT_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_ASAN], + ), + JobNames.UNIT_TEST_MSAN: CommonJobConfigs.UNIT_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_MSAN], + ), + JobNames.UNIT_TEST_TSAN: CommonJobConfigs.UNIT_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_TSAN], + ), + JobNames.UNIT_TEST_UBSAN: CommonJobConfigs.UNIT_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_UBSAN], + ), + JobNames.AST_FUZZER_TEST_DEBUG: CommonJobConfigs.ASTFUZZER_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_DEBUG], + ), + JobNames.AST_FUZZER_TEST_ASAN: CommonJobConfigs.ASTFUZZER_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_ASAN], + ), + JobNames.AST_FUZZER_TEST_MSAN: CommonJobConfigs.ASTFUZZER_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_MSAN], + ), + JobNames.AST_FUZZER_TEST_TSAN: CommonJobConfigs.ASTFUZZER_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_TSAN], + ), + JobNames.AST_FUZZER_TEST_UBSAN: CommonJobConfigs.ASTFUZZER_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_UBSAN], + ), + JobNames.STATELESS_TEST_FLAKY_ASAN: CommonJobConfigs.STATELESS_TEST.with_properties( + 
required_builds=[BuildNames.PACKAGE_ASAN], pr_only=True, timeout=3600 + ), + JobNames.JEPSEN_KEEPER: JobConfig( + required_builds=[BuildNames.BINARY_RELEASE], + run_by_label="jepsen-test", + run_command="jepsen_check.py keeper", + runner_type=Runners.STYLE_CHECKER_ARM, + ), + JobNames.JEPSEN_SERVER: JobConfig( + required_builds=[BuildNames.BINARY_RELEASE], + run_by_label="jepsen-test", + run_command="jepsen_check.py server", + runner_type=Runners.STYLE_CHECKER_ARM, + ), + JobNames.PERFORMANCE_TEST_AMD64: CommonJobConfigs.PERF_TESTS.with_properties( + required_builds=[BuildNames.PACKAGE_RELEASE], num_batches=4 + ), + JobNames.PERFORMANCE_TEST_ARM64: CommonJobConfigs.PERF_TESTS.with_properties( + required_builds=[BuildNames.PACKAGE_AARCH64], + num_batches=4, + run_by_label="pr-performance", + runner_type=Runners.FUNC_TESTER_ARM, + ), + JobNames.SQLANCER: CommonJobConfigs.SQLLANCER_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_RELEASE], + ), + JobNames.SQLANCER_DEBUG: CommonJobConfigs.SQLLANCER_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_DEBUG], + ), + JobNames.SQL_LOGIC_TEST: CommonJobConfigs.SQLLOGIC_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_RELEASE], + ), + JobNames.SQLTEST: CommonJobConfigs.SQL_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_RELEASE], + ), + JobNames.CLICKBENCH_TEST: CommonJobConfigs.CLICKBENCH_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_RELEASE], + ), + JobNames.CLICKBENCH_TEST_ARM: CommonJobConfigs.CLICKBENCH_TEST.with_properties( + required_builds=[BuildNames.PACKAGE_AARCH64], + runner_type=Runners.FUNC_TESTER_ARM, + ), + JobNames.LIBFUZZER_TEST: JobConfig( + required_builds=[BuildNames.FUZZERS], + run_by_label=Tags.libFuzzer, + timeout=10800, + run_command='libfuzzer_test_check.py "$CHECK_NAME"', + runner_type=Runners.STYLE_CHECKER, + ), + JobNames.DOCKER_SERVER: CommonJobConfigs.DOCKER_SERVER.with_properties( + required_builds=[BuildNames.PACKAGE_RELEASE] + ), + JobNames.DOCKER_KEEPER: CommonJobConfigs.DOCKER_SERVER.with_properties( + required_builds=[BuildNames.PACKAGE_RELEASE] + ), + JobNames.DOCS_CHECK: JobConfig( + digest=DigestConfig( + include_paths=["**/*.md", "./docs", "tests/ci/docs_check.py"], + docker=["clickhouse/docs-builder"], + ), + run_command="docs_check.py", + runner_type=Runners.FUNC_TESTER, + ), + JobNames.FAST_TEST: JobConfig( + pr_only=True, + digest=DigestConfig( + include_paths=["./tests/queries/0_stateless/"], + exclude_files=[".md"], + docker=["clickhouse/fasttest"], + ), + timeout=2400, + runner_type=Runners.BUILDER, + ), + JobNames.STYLE_CHECK: JobConfig( + run_always=True, + runner_type=Runners.STYLE_CHECKER_ARM, + ), + JobNames.BUGFIX_VALIDATE: JobConfig( + run_by_label="pr-bugfix", + run_command="bugfix_validate_check.py", + timeout=900, + runner_type=Runners.STYLE_CHECKER, + ), + } + + @classmethod + def get_tag_config(cls, label_name: str) -> Optional[LabelConfig]: + for label, config in cls.TAG_CONFIGS.items(): if normalize_string(label_name) == normalize_string(label): return config return None - def get_job_ci_stage(self, job_name: str) -> str: + @classmethod + def get_job_ci_stage(cls, job_name: str) -> str: if job_name in [ JobNames.STYLE_CHECK, JobNames.FAST_TEST, + JobNames.JEPSEN_SERVER, JobNames.JEPSEN_KEEPER, JobNames.BUILD_CHECK, - JobNames.BUILD_CHECK_SPECIAL, ]: - # FIXME: we can't currently handle Jepsen in the Stage as it's job has concurrency directive - # BUILD_CHECK and BUILD_CHECK_SPECIAL runs not in stage because we need them even if Builds 
stage failed - return CIStages.NA + return WorkflowStages.NA + stage_type = None - if self.is_build_job(job_name): - stage_type = CIStages.BUILDS_1 - if job_name in CI_CONFIG.get_builds_for_report( - JobNames.BUILD_CHECK_SPECIAL - ): - # special builds go to Build_2 stage to not delay Builds_1/Test_1 - stage_type = CIStages.BUILDS_2 - elif self.is_docs_job(job_name): - stage_type = CIStages.TESTS_1 - elif self.is_test_job(job_name): - if job_name in CI_CONFIG.test_configs: - required_build = CI_CONFIG.test_configs[job_name].required_build - assert required_build - if required_build in CI_CONFIG.get_builds_for_report( - JobNames.BUILD_CHECK - ): - stage_type = CIStages.TESTS_1 - else: - stage_type = CIStages.TESTS_2 + if cls.is_build_job(job_name): + for _job, config in cls.JOB_CONFIGS.items(): + if config.required_builds and job_name in config.required_builds: + stage_type = WorkflowStages.BUILDS_1 + break else: - stage_type = CIStages.TESTS_1 - if job_name not in REQUIRED_CHECKS: - stage_type = CIStages.TESTS_3 + stage_type = WorkflowStages.BUILDS_2 + elif cls.is_docs_job(job_name): + stage_type = WorkflowStages.TESTS_1 + elif cls.is_test_job(job_name): + if job_name in CI.JOB_CONFIGS: + if job_name in REQUIRED_CHECKS: + stage_type = WorkflowStages.TESTS_1 + else: + stage_type = WorkflowStages.TESTS_3 assert stage_type, f"BUG [{job_name}]" return stage_type - def get_job_config(self, check_name: str) -> JobConfig: - res = None - for config in ( - self.build_config, - self.builds_report_config, - self.test_configs, - self.other_jobs_configs, - ): - if check_name in config: # type: ignore - res = config[check_name].job_config # type: ignore - break - return res # type: ignore + @classmethod + def get_job_config(cls, check_name: str) -> JobConfig: + return cls.JOB_CONFIGS[check_name] - def get_runner_type(self, check_name: str) -> str: - result = None - if self.is_build_job(check_name) or check_name == JobNames.FAST_TEST: - result = Runners.BUILDER - elif any( - words in check_name.lower() - for words in [ - "install packages", - "compatibility check", - "docker", - "build check", - "jepsen", - "style check", - ] - ): - result = Runners.STYLE_CHECKER - elif check_name == JobNames.DOCS_CHECK: - # docs job is demanding - result = Runners.FUNC_TESTER_ARM - elif any( - words in check_name.lower() - for words in [ - "stateless", - "stateful", - "clickbench", - "sqllogic test", - "libfuzzer", - "bugfix validation", - ] - ): - result = Runners.FUNC_TESTER - elif any( - words in check_name.lower() - for words in ["stress", "upgrade", "integration", "performance comparison"] - ): - result = Runners.STRESS_TESTER - elif any( - words in check_name.lower() - for words in ["ast fuzzer", "unit tests", "sqlancer", "sqltest"] - ): - result = Runners.FUZZER_UNIT_TESTER + @classmethod + def get_required_build_name(cls, check_name: str) -> str: + assert check_name in cls.JOB_CONFIGS + required_builds = cls.JOB_CONFIGS[check_name].required_builds + assert required_builds and len(required_builds) == 1 + return required_builds[0] - assert result, f"BUG, no runner for [{check_name}]" - - if ( - "aarch" in check_name.lower() or "arm64" in check_name.lower() - ) and "aarch" not in result: - if result == Runners.STRESS_TESTER: - # FIXME: no arm stress tester group atm - result = Runners.FUNC_TESTER_ARM - elif result == Runners.BUILDER: - # crosscompile - no arm required - pass - else: - # switch to aarch64 runner - result += "-aarch64" - - return result - - def get_job_parents(self, check_name: str) -> List[str]: - 
res = [] - check_name = normalize_string(check_name) - for config in ( - self.build_config, - self.test_configs, - self.other_jobs_configs, - ): - for job_name in config: # type: ignore - if check_name == normalize_string(job_name): - if isinstance(config[job_name], TestConfig): # type: ignore - if config[job_name].required_build: # type: ignore - res.append(config[job_name].required_build) # type: ignore - return res - - def get_digest_config(self, check_name: str) -> DigestConfig: - res = None - for config in ( - self.other_jobs_configs, - self.build_config, - self.builds_report_config, - self.test_configs, - ): - if check_name in config: # type: ignore - res = config[check_name].job_config.digest # type: ignore - assert ( - res - ), f"Invalid check_name or CI_CONFIG outdated, config not found for [{check_name}]" - return res # type: ignore + @classmethod + def get_job_parents(cls, check_name: str) -> List[str]: + return cls.JOB_CONFIGS[check_name].required_builds or [] + @classmethod def get_workflow_jobs_with_configs( - self, is_mq: bool, is_docs_only: bool, is_master: bool + cls, is_mq: bool, is_docs_only: bool, is_master: bool, is_pr: bool ) -> Dict[str, JobConfig]: """ get a list of all jobs for a workflow with configs """ jobs = [] if is_mq: - jobs = self._MQ_JOBS + jobs = MQ_JOBS elif is_docs_only: - jobs = self._DOCS_CHECK_JOBS + jobs = cls._DOCS_CHECK_JOBS else: - for config in ( - self.other_jobs_configs, - self.build_config, - self.builds_report_config, - self.test_configs, - ): - jobs += list(config) # type:ignore + # add all jobs + jobs = list(cls.JOB_CONFIGS) if is_master: - for job in self._MQ_JOBS: + for job in MQ_JOBS: jobs.remove(job) randomization_bucket_jobs = {} # type: Dict[str, Dict[str, JobConfig]] res = {} # type: Dict[str, JobConfig] for job in jobs: - job_config = self.get_job_config(job) + job_config = cls.JOB_CONFIGS[job] - if job_config.random_bucket: + if job_config.random_bucket and is_pr: if job_config.random_bucket not in randomization_bucket_jobs: randomization_bucket_jobs[job_config.random_bucket] = {} randomization_bucket_jobs[job_config.random_bucket][job] = job_config @@ -759,41 +629,20 @@ class CIConfig: return res - def get_builds_for_report( - self, report_name: str, release: bool = False, backport: bool = False - ) -> List[str]: - # hack to modify build list for release and bp wf - assert not (release and backport), "Invalid input" - if backport and report_name == JobNames.BUILD_CHECK: - return [ - Build.PACKAGE_RELEASE, - Build.PACKAGE_AARCH64, - Build.PACKAGE_ASAN, - Build.PACKAGE_TSAN, - Build.PACKAGE_DEBUG, - ] - if (release or backport) and report_name == JobNames.BUILD_CHECK_SPECIAL: - return [ - Build.BINARY_DARWIN, - Build.BINARY_DARWIN_AARCH64, - ] - - return self.builds_report_config[report_name].builds - @classmethod def is_build_job(cls, job: str) -> bool: - return job in Build + return job in cls.BuildNames @classmethod def is_test_job(cls, job: str) -> bool: - return not cls.is_build_job(job) and job != JobNames.STYLE_CHECK + return not cls.is_build_job(job) and job != cls.JobNames.STYLE_CHECK @classmethod def is_docs_job(cls, job: str) -> bool: return job == JobNames.DOCS_CHECK - @staticmethod - def is_required(check_name: str) -> bool: + @classmethod + def is_required(cls, check_name: str) -> bool: """Checks if a check_name is in REQUIRED_CHECKS, including batched jobs""" _BATCH_REGEXP = re.compile(r"\s+\[[0-9/]+\]$") if check_name in REQUIRED_CHECKS: @@ -802,810 +651,15 @@ class CIConfig: return check_name[: batch.start()] in 
REQUIRED_CHECKS return False - def validate(self) -> None: - errors = [] - for name, build_config in self.build_config.items(): - build_in_reports = False - for _, report_config in self.builds_report_config.items(): - if name in report_config.builds: - build_in_reports = True - break - # All build configs must belong to build_report_config - if not build_in_reports: - logging.error("Build name %s does not belong to build reports", name) - errors.append(f"Build name {name} does not belong to build reports") - # The name should be the same as build_config.name - if not build_config.name == name: - logging.error( - "Build name '%s' does not match the config 'name' value '%s'", - name, - build_config.name, - ) - errors.append( - f"Build name {name} does not match 'name' value '{build_config.name}'" - ) - # All build_report_config values should be in build_config.keys() - for build_report_name, build_report_config in self.builds_report_config.items(): - build_names = build_report_config.builds - missed_names = [ - name for name in build_names if name not in self.build_config.keys() - ] - if missed_names: - logging.error( - "The following names of the build report '%s' " - "are missed in build_config: %s", - build_report_name, - missed_names, - ) - errors.append( - f"The following names of the build report '{build_report_name}' " - f"are missed in build_config: {missed_names}", - ) - # And finally, all tests' requirements must be in the builds - for test_name, test_config in self.test_configs.items(): - if test_config.required_build not in self.build_config.keys(): - logging.error( - "The requirement '%s' for '%s' is not found in builds", - test_config, - test_name, - ) - errors.append( - f"The requirement '{test_config}' for " - f"'{test_name}' is not found in builds" - ) - if ( - test_config.required_build - and test_config.required_build - not in self.builds_report_config[JobNames.BUILD_CHECK].builds - ): - errors.append( - f"Test job' required build must be from [{JobNames.BUILD_CHECK}] list" - ) - - if errors: - raise KeyError("config contains errors", errors) + @classmethod + def get_build_config(cls, build_name: str) -> BuildConfig: + assert build_name in cls.JOB_CONFIGS, f"Invalid build name [{build_name}]" + res = cls.JOB_CONFIGS[build_name].build_config + assert res, f"not a build [{build_name}] or invalid JobConfig" + return res -# checks required by Mergeable Check -REQUIRED_CHECKS = [ - "PR Check", - StatusNames.SYNC, - JobNames.BUILD_CHECK, - JobNames.BUILD_CHECK_SPECIAL, - JobNames.DOCS_CHECK, - JobNames.FAST_TEST, - JobNames.STATEFUL_TEST_RELEASE, - JobNames.STATELESS_TEST_RELEASE, - JobNames.STATELESS_TEST_ASAN, - JobNames.STATELESS_TEST_FLAKY_ASAN, - JobNames.STATEFUL_TEST_ASAN, - JobNames.STYLE_CHECK, - JobNames.UNIT_TEST_ASAN, - JobNames.UNIT_TEST_MSAN, - JobNames.UNIT_TEST, - JobNames.UNIT_TEST_TSAN, - JobNames.UNIT_TEST_UBSAN, - JobNames.INTEGRATION_TEST_ASAN_OLD_ANALYZER, - JobNames.STATELESS_TEST_OLD_ANALYZER_S3_REPLICATED_RELEASE, -] - -CI_CONFIG = CIConfig( - label_configs={ - CILabels.DO_NOT_TEST_LABEL: LabelConfig(run_jobs=[JobNames.STYLE_CHECK]), - CILabels.CI_SET_ARM: LabelConfig( - run_jobs=[ - JobNames.STYLE_CHECK, - Build.PACKAGE_AARCH64, - JobNames.INTEGRATION_TEST_ARM, - ] - ), - CILabels.CI_SET_REQUIRED: LabelConfig(run_jobs=REQUIRED_CHECKS), - CILabels.CI_SET_NORMAL_BUILDS: LabelConfig( - run_jobs=[ - JobNames.STYLE_CHECK, - JobNames.BUILD_CHECK, - Build.PACKAGE_RELEASE, - Build.PACKAGE_AARCH64, - Build.PACKAGE_ASAN, - Build.PACKAGE_UBSAN, - 
Build.PACKAGE_TSAN, - Build.PACKAGE_MSAN, - Build.PACKAGE_DEBUG, - Build.BINARY_RELEASE, - Build.PACKAGE_RELEASE_COVERAGE, - Build.FUZZERS, - ] - ), - CILabels.CI_SET_SPECIAL_BUILDS: LabelConfig( - run_jobs=[ - JobNames.STYLE_CHECK, - JobNames.BUILD_CHECK_SPECIAL, - Build.BINARY_TIDY, - Build.BINARY_DARWIN, - Build.BINARY_AARCH64, - Build.BINARY_AARCH64_V80COMPAT, - Build.BINARY_FREEBSD, - Build.BINARY_DARWIN_AARCH64, - Build.BINARY_PPC64LE, - Build.BINARY_RISCV64, - Build.BINARY_S390X, - Build.BINARY_LOONGARCH64, - Build.BINARY_AMD64_COMPAT, - Build.BINARY_AMD64_MUSL, - ] - ), - CILabels.CI_SET_NON_REQUIRED: LabelConfig( - run_jobs=[job for job in JobNames if job not in REQUIRED_CHECKS] - ), - CILabels.CI_SET_OLD_ANALYZER: LabelConfig( - run_jobs=[ - JobNames.STYLE_CHECK, - JobNames.FAST_TEST, - Build.PACKAGE_RELEASE, - Build.PACKAGE_ASAN, - JobNames.STATELESS_TEST_OLD_ANALYZER_S3_REPLICATED_RELEASE, - JobNames.INTEGRATION_TEST_ASAN_OLD_ANALYZER, - ] - ), - CILabels.CI_SET_SYNC: LabelConfig( - run_jobs=[ - Build.PACKAGE_ASAN, - JobNames.STYLE_CHECK, - JobNames.BUILD_CHECK, - JobNames.UNIT_TEST_ASAN, - JobNames.STATEFUL_TEST_ASAN, - ] - ), - }, - build_config={ - Build.PACKAGE_RELEASE: BuildConfig( - name=Build.PACKAGE_RELEASE, - compiler="clang-18", - package_type="deb", - static_binary_name="amd64", - additional_pkgs=True, - ), - Build.PACKAGE_AARCH64: BuildConfig( - name=Build.PACKAGE_AARCH64, - compiler="clang-18-aarch64", - package_type="deb", - static_binary_name="aarch64", - additional_pkgs=True, - ), - Build.PACKAGE_ASAN: BuildConfig( - name=Build.PACKAGE_ASAN, - compiler="clang-18", - sanitizer="address", - package_type="deb", - ), - Build.PACKAGE_UBSAN: BuildConfig( - name=Build.PACKAGE_UBSAN, - compiler="clang-18", - sanitizer="undefined", - package_type="deb", - ), - Build.PACKAGE_TSAN: BuildConfig( - name=Build.PACKAGE_TSAN, - compiler="clang-18", - sanitizer="thread", - package_type="deb", - ), - Build.PACKAGE_MSAN: BuildConfig( - name=Build.PACKAGE_MSAN, - compiler="clang-18", - sanitizer="memory", - package_type="deb", - ), - Build.PACKAGE_DEBUG: BuildConfig( - name=Build.PACKAGE_DEBUG, - compiler="clang-18", - debug_build=True, - package_type="deb", - sparse_checkout=True, # Check that it works with at least one build, see also update-submodules.sh - ), - Build.PACKAGE_RELEASE_COVERAGE: BuildConfig( - name=Build.PACKAGE_RELEASE_COVERAGE, - compiler="clang-18", - coverage=True, - package_type="deb", - ), - Build.BINARY_RELEASE: BuildConfig( - name=Build.BINARY_RELEASE, - compiler="clang-18", - package_type="binary", - ), - Build.BINARY_TIDY: BuildConfig( - name=Build.BINARY_TIDY, - compiler="clang-18", - debug_build=True, - package_type="binary", - static_binary_name="debug-amd64", - tidy=True, - comment="clang-tidy is used for static analysis", - ), - Build.BINARY_DARWIN: BuildConfig( - name=Build.BINARY_DARWIN, - compiler="clang-18-darwin", - package_type="binary", - static_binary_name="macos", - ), - Build.BINARY_AARCH64: BuildConfig( - name=Build.BINARY_AARCH64, - compiler="clang-18-aarch64", - package_type="binary", - ), - Build.BINARY_AARCH64_V80COMPAT: BuildConfig( - name=Build.BINARY_AARCH64_V80COMPAT, - compiler="clang-18-aarch64-v80compat", - package_type="binary", - static_binary_name="aarch64v80compat", - comment="For ARMv8.1 and older", - ), - Build.BINARY_FREEBSD: BuildConfig( - name=Build.BINARY_FREEBSD, - compiler="clang-18-freebsd", - package_type="binary", - static_binary_name="freebsd", - ), - Build.BINARY_DARWIN_AARCH64: BuildConfig( - 
name=Build.BINARY_DARWIN_AARCH64, - compiler="clang-18-darwin-aarch64", - package_type="binary", - static_binary_name="macos-aarch64", - ), - Build.BINARY_PPC64LE: BuildConfig( - name=Build.BINARY_PPC64LE, - compiler="clang-18-ppc64le", - package_type="binary", - static_binary_name="powerpc64le", - ), - Build.BINARY_AMD64_COMPAT: BuildConfig( - name=Build.BINARY_AMD64_COMPAT, - compiler="clang-18-amd64-compat", - package_type="binary", - static_binary_name="amd64compat", - comment="SSE2-only build", - ), - Build.BINARY_AMD64_MUSL: BuildConfig( - name=Build.BINARY_AMD64_MUSL, - compiler="clang-18-amd64-musl", - package_type="binary", - static_binary_name="amd64musl", - comment="Build with Musl", - ), - Build.BINARY_RISCV64: BuildConfig( - name=Build.BINARY_RISCV64, - compiler="clang-18-riscv64", - package_type="binary", - static_binary_name="riscv64", - ), - Build.BINARY_S390X: BuildConfig( - name=Build.BINARY_S390X, - compiler="clang-18-s390x", - package_type="binary", - static_binary_name="s390x", - ), - Build.BINARY_LOONGARCH64: BuildConfig( - name=Build.BINARY_LOONGARCH64, - compiler="clang-18-loongarch64", - package_type="binary", - static_binary_name="loongarch64", - ), - Build.FUZZERS: BuildConfig( - name=Build.FUZZERS, - compiler="clang-18", - package_type="fuzzers", - job_config=fuzzer_build_job_config, - ), - }, - builds_report_config={ - JobNames.BUILD_CHECK: BuildReportConfig( - builds=[ - Build.PACKAGE_RELEASE, - Build.PACKAGE_AARCH64, - Build.PACKAGE_ASAN, - Build.PACKAGE_UBSAN, - Build.PACKAGE_TSAN, - Build.PACKAGE_MSAN, - Build.PACKAGE_DEBUG, - Build.BINARY_RELEASE, - Build.PACKAGE_RELEASE_COVERAGE, - Build.FUZZERS, - ] - ), - JobNames.BUILD_CHECK_SPECIAL: BuildReportConfig( - builds=[ - Build.BINARY_TIDY, - Build.BINARY_DARWIN, - Build.BINARY_AARCH64, - Build.BINARY_AARCH64_V80COMPAT, - Build.BINARY_FREEBSD, - Build.BINARY_DARWIN_AARCH64, - Build.BINARY_PPC64LE, - Build.BINARY_RISCV64, - Build.BINARY_S390X, - Build.BINARY_LOONGARCH64, - Build.BINARY_AMD64_COMPAT, - Build.BINARY_AMD64_MUSL, - ] - ), - }, - other_jobs_configs={ - JobNames.DOCKER_SERVER: TestConfig("", job_config=docker_server_job_config), - JobNames.DOCKER_KEEPER: TestConfig("", job_config=docker_server_job_config), - JobNames.DOCS_CHECK: TestConfig( - "", - job_config=JobConfig( - digest=DigestConfig( - include_paths=["**/*.md", "./docs", "tests/ci/docs_check.py"], - docker=["clickhouse/docs-builder"], - ), - run_command="docs_check.py", - ), - ), - JobNames.FAST_TEST: TestConfig( - "", - job_config=JobConfig( - pr_only=True, - digest=DigestConfig( - include_paths=["./tests/queries/0_stateless/"], - exclude_files=[".md"], - docker=["clickhouse/fasttest"], - ), - timeout=2400, - ), - ), - JobNames.STYLE_CHECK: TestConfig( - "", - job_config=JobConfig( - run_always=True, - ), - ), - JobNames.BUGFIX_VALIDATE: TestConfig( - "", - # we run this check by label - no digest required - job_config=JobConfig( - run_by_label="pr-bugfix", - run_command="bugfix_validate_check.py", - timeout=900, - ), - ), - }, - test_configs={ - JobNames.INSTALL_TEST_AMD: TestConfig( - Build.PACKAGE_RELEASE, job_config=install_test_params - ), - JobNames.INSTALL_TEST_ARM: TestConfig( - Build.PACKAGE_AARCH64, job_config=install_test_params - ), - JobNames.STATEFUL_TEST_ASAN: TestConfig( - Build.PACKAGE_ASAN, job_config=JobConfig(**stateful_test_common_params) # type: ignore - ), - JobNames.STATEFUL_TEST_TSAN: TestConfig( - Build.PACKAGE_TSAN, job_config=JobConfig(**stateful_test_common_params) # type: ignore - ), - 
JobNames.STATEFUL_TEST_MSAN: TestConfig( - Build.PACKAGE_MSAN, job_config=JobConfig(**stateful_test_common_params) # type: ignore - ), - JobNames.STATEFUL_TEST_UBSAN: TestConfig( - Build.PACKAGE_UBSAN, job_config=JobConfig(**stateful_test_common_params) # type: ignore - ), - JobNames.STATEFUL_TEST_DEBUG: TestConfig( - Build.PACKAGE_DEBUG, job_config=JobConfig(**stateful_test_common_params) # type: ignore - ), - JobNames.STATEFUL_TEST_RELEASE: TestConfig( - Build.PACKAGE_RELEASE, job_config=JobConfig(**stateful_test_common_params) # type: ignore - ), - JobNames.STATEFUL_TEST_RELEASE_COVERAGE: TestConfig( - Build.PACKAGE_RELEASE_COVERAGE, job_config=JobConfig(**stateful_test_common_params) # type: ignore - ), - JobNames.STATEFUL_TEST_AARCH64: TestConfig( - Build.PACKAGE_AARCH64, job_config=JobConfig(**stateful_test_common_params) # type: ignore - ), - # Stateful tests for parallel replicas - JobNames.STATEFUL_TEST_PARALLEL_REPL_RELEASE: TestConfig( - Build.PACKAGE_RELEASE, job_config=JobConfig(**stateful_test_common_params) # type: ignore - ), - JobNames.STATEFUL_TEST_PARALLEL_REPL_DEBUG: TestConfig( - Build.PACKAGE_DEBUG, job_config=JobConfig(**stateful_test_common_params) # type: ignore - ), - JobNames.STATEFUL_TEST_PARALLEL_REPL_ASAN: TestConfig( - Build.PACKAGE_ASAN, job_config=JobConfig(random_bucket="parrepl_with_sanitizer", **stateful_test_common_params) # type: ignore - ), - JobNames.STATEFUL_TEST_PARALLEL_REPL_MSAN: TestConfig( - Build.PACKAGE_MSAN, job_config=JobConfig(random_bucket="parrepl_with_sanitizer", **stateful_test_common_params) # type: ignore - ), - JobNames.STATEFUL_TEST_PARALLEL_REPL_UBSAN: TestConfig( - Build.PACKAGE_UBSAN, job_config=JobConfig(random_bucket="parrepl_with_sanitizer", **stateful_test_common_params) # type: ignore - ), - JobNames.STATEFUL_TEST_PARALLEL_REPL_TSAN: TestConfig( - Build.PACKAGE_TSAN, job_config=JobConfig(random_bucket="parrepl_with_sanitizer", **stateful_test_common_params) # type: ignore - ), - # End stateful tests for parallel replicas - JobNames.STATELESS_TEST_ASAN: TestConfig( - Build.PACKAGE_ASAN, - job_config=JobConfig(num_batches=4, **stateless_test_common_params), # type: ignore - ), - JobNames.STATELESS_TEST_TSAN: TestConfig( - Build.PACKAGE_TSAN, - job_config=JobConfig(num_batches=5, **stateless_test_common_params), # type: ignore - ), - JobNames.STATELESS_TEST_MSAN: TestConfig( - Build.PACKAGE_MSAN, - job_config=JobConfig(num_batches=6, **stateless_test_common_params), # type: ignore - ), - JobNames.STATELESS_TEST_UBSAN: TestConfig( - Build.PACKAGE_UBSAN, - job_config=JobConfig(num_batches=2, **stateless_test_common_params), # type: ignore - ), - JobNames.STATELESS_TEST_DEBUG: TestConfig( - Build.PACKAGE_DEBUG, - job_config=JobConfig(num_batches=5, **stateless_test_common_params), # type: ignore - ), - JobNames.STATELESS_TEST_RELEASE: TestConfig( - Build.PACKAGE_RELEASE, job_config=JobConfig(**stateless_test_common_params) # type: ignore - ), - JobNames.STATELESS_TEST_RELEASE_COVERAGE: TestConfig( - Build.PACKAGE_RELEASE_COVERAGE, - job_config=JobConfig(num_batches=6, **stateless_test_common_params), # type: ignore - ), - JobNames.STATELESS_TEST_AARCH64: TestConfig( - Build.PACKAGE_AARCH64, job_config=JobConfig(**stateless_test_common_params) # type: ignore - ), - JobNames.STATELESS_TEST_OLD_ANALYZER_S3_REPLICATED_RELEASE: TestConfig( - Build.PACKAGE_RELEASE, - job_config=JobConfig(num_batches=4, **stateless_test_common_params), # type: ignore - ), - JobNames.STATELESS_TEST_S3_DEBUG: TestConfig( - Build.PACKAGE_DEBUG, - 
job_config=JobConfig(num_batches=6, **stateless_test_common_params), # type: ignore - ), - JobNames.STATELESS_TEST_AZURE_ASAN: TestConfig( - Build.PACKAGE_ASAN, - job_config=JobConfig(num_batches=4, **stateless_test_common_params, release_only=True), # type: ignore - ), - JobNames.STATELESS_TEST_S3_TSAN: TestConfig( - Build.PACKAGE_TSAN, - job_config=JobConfig(num_batches=5, **stateless_test_common_params), # type: ignore - ), - JobNames.STRESS_TEST_DEBUG: TestConfig( - Build.PACKAGE_DEBUG, job_config=JobConfig(**stress_test_common_params) # type: ignore - ), - JobNames.STRESS_TEST_TSAN: TestConfig( - Build.PACKAGE_TSAN, job_config=JobConfig(**stress_test_common_params) # type: ignore - ), - JobNames.STRESS_TEST_ASAN: TestConfig( - Build.PACKAGE_ASAN, job_config=JobConfig(random_bucket="stress_with_sanitizer", **stress_test_common_params) # type: ignore - ), - JobNames.STRESS_TEST_UBSAN: TestConfig( - Build.PACKAGE_UBSAN, job_config=JobConfig(random_bucket="stress_with_sanitizer", **stress_test_common_params) # type: ignore - ), - JobNames.STRESS_TEST_MSAN: TestConfig( - Build.PACKAGE_MSAN, job_config=JobConfig(random_bucket="stress_with_sanitizer", **stress_test_common_params) # type: ignore - ), - JobNames.UPGRADE_TEST_ASAN: TestConfig( - Build.PACKAGE_ASAN, job_config=JobConfig(pr_only=True, random_bucket="upgrade_with_sanitizer", **upgrade_test_common_params) # type: ignore - ), - JobNames.STRESS_TEST_AZURE_TSAN: TestConfig( - Build.PACKAGE_TSAN, job_config=JobConfig(**stress_test_common_params, release_only=True) # type: ignore - ), - JobNames.STRESS_TEST_AZURE_MSAN: TestConfig( - Build.PACKAGE_MSAN, job_config=JobConfig(**stress_test_common_params, release_only=True) # type: ignore - ), - JobNames.UPGRADE_TEST_TSAN: TestConfig( - Build.PACKAGE_TSAN, job_config=JobConfig(pr_only=True, random_bucket="upgrade_with_sanitizer", **upgrade_test_common_params) # type: ignore - ), - JobNames.UPGRADE_TEST_MSAN: TestConfig( - Build.PACKAGE_MSAN, job_config=JobConfig(pr_only=True, random_bucket="upgrade_with_sanitizer", **upgrade_test_common_params) # type: ignore - ), - JobNames.UPGRADE_TEST_DEBUG: TestConfig( - Build.PACKAGE_DEBUG, job_config=JobConfig(pr_only=True, **upgrade_test_common_params) # type: ignore - ), - JobNames.INTEGRATION_TEST_ASAN: TestConfig( - Build.PACKAGE_ASAN, - job_config=JobConfig(num_batches=4, **integration_test_common_params, release_only=True), # type: ignore - ), - JobNames.INTEGRATION_TEST_ASAN_OLD_ANALYZER: TestConfig( - Build.PACKAGE_ASAN, - job_config=JobConfig(num_batches=6, **integration_test_common_params), # type: ignore - ), - JobNames.INTEGRATION_TEST_TSAN: TestConfig( - Build.PACKAGE_TSAN, - job_config=JobConfig(num_batches=6, **integration_test_common_params), # type: ignore - ), - JobNames.INTEGRATION_TEST_ARM: TestConfig( - Build.PACKAGE_AARCH64, - job_config=JobConfig(num_batches=6, **integration_test_common_params), # type: ignore - ), - JobNames.INTEGRATION_TEST: TestConfig( - Build.PACKAGE_RELEASE, - job_config=JobConfig(num_batches=4, **integration_test_common_params, release_only=True), # type: ignore - ), - JobNames.INTEGRATION_TEST_FLAKY: TestConfig( - Build.PACKAGE_ASAN, job_config=JobConfig(pr_only=True, **integration_test_common_params) # type: ignore - ), - JobNames.COMPATIBILITY_TEST: TestConfig( - Build.PACKAGE_RELEASE, - job_config=JobConfig( - required_on_release_branch=True, **compatibility_test_common_params # type: ignore - ), - ), - JobNames.COMPATIBILITY_TEST_ARM: TestConfig( - Build.PACKAGE_AARCH64, - job_config=JobConfig( - 
required_on_release_branch=True, **compatibility_test_common_params # type: ignore - ), - ), - JobNames.UNIT_TEST: TestConfig( - Build.BINARY_RELEASE, job_config=JobConfig(**unit_test_common_params) # type: ignore - ), - JobNames.UNIT_TEST_ASAN: TestConfig( - Build.PACKAGE_ASAN, job_config=JobConfig(**unit_test_common_params) # type: ignore - ), - JobNames.UNIT_TEST_MSAN: TestConfig( - Build.PACKAGE_MSAN, job_config=JobConfig(**unit_test_common_params) # type: ignore - ), - JobNames.UNIT_TEST_TSAN: TestConfig( - Build.PACKAGE_TSAN, job_config=JobConfig(**unit_test_common_params) # type: ignore - ), - JobNames.UNIT_TEST_UBSAN: TestConfig( - Build.PACKAGE_UBSAN, job_config=JobConfig(**unit_test_common_params) # type: ignore - ), - JobNames.AST_FUZZER_TEST_DEBUG: TestConfig( - Build.PACKAGE_DEBUG, job_config=JobConfig(**astfuzzer_test_common_params) # type: ignore - ), - JobNames.AST_FUZZER_TEST_ASAN: TestConfig( - Build.PACKAGE_ASAN, job_config=JobConfig(**astfuzzer_test_common_params) # type: ignore - ), - JobNames.AST_FUZZER_TEST_MSAN: TestConfig( - Build.PACKAGE_MSAN, job_config=JobConfig(**astfuzzer_test_common_params) # type: ignore - ), - JobNames.AST_FUZZER_TEST_TSAN: TestConfig( - Build.PACKAGE_TSAN, job_config=JobConfig(**astfuzzer_test_common_params) # type: ignore - ), - JobNames.AST_FUZZER_TEST_UBSAN: TestConfig( - Build.PACKAGE_UBSAN, job_config=JobConfig(**astfuzzer_test_common_params) # type: ignore - ), - JobNames.STATELESS_TEST_FLAKY_ASAN: TestConfig( - # replace to non-default - Build.PACKAGE_ASAN, - job_config=JobConfig(pr_only=True, **{**stateless_test_common_params, "timeout": 3600}), # type: ignore - ), - JobNames.JEPSEN_KEEPER: TestConfig( - Build.BINARY_RELEASE, - job_config=JobConfig( - run_by_label="jepsen-test", run_command="jepsen_check.py keeper" - ), - ), - JobNames.JEPSEN_SERVER: TestConfig( - Build.BINARY_RELEASE, - job_config=JobConfig( - run_by_label="jepsen-test", run_command="jepsen_check.py server" - ), - ), - JobNames.PERFORMANCE_TEST_AMD64: TestConfig( - Build.PACKAGE_RELEASE, - job_config=JobConfig(num_batches=4, **perf_test_common_params), # type: ignore - ), - JobNames.PERFORMANCE_TEST_ARM64: TestConfig( - Build.PACKAGE_AARCH64, - job_config=JobConfig(num_batches=4, run_by_label="pr-performance", **perf_test_common_params), # type: ignore - ), - JobNames.SQLANCER: TestConfig( - Build.PACKAGE_RELEASE, job_config=sqllancer_test_common_params - ), - JobNames.SQLANCER_DEBUG: TestConfig( - Build.PACKAGE_DEBUG, job_config=sqllancer_test_common_params - ), - JobNames.SQL_LOGIC_TEST: TestConfig( - Build.PACKAGE_RELEASE, job_config=sqllogic_test_params - ), - JobNames.SQLTEST: TestConfig(Build.PACKAGE_RELEASE, job_config=sql_test_params), - JobNames.CLICKBENCH_TEST: TestConfig( - Build.PACKAGE_RELEASE, job_config=JobConfig(**clickbench_test_params) # type: ignore - ), - JobNames.CLICKBENCH_TEST_ARM: TestConfig( - Build.PACKAGE_AARCH64, job_config=JobConfig(**clickbench_test_params) # type: ignore - ), - JobNames.LIBFUZZER_TEST: TestConfig( - Build.FUZZERS, - job_config=JobConfig( - run_by_label=CILabels.libFuzzer, - timeout=10800, - run_command='libfuzzer_test_check.py "$CHECK_NAME"', - ), - ), # type: ignore - }, -) -CI_CONFIG.validate() - - -@dataclass -class CheckDescription: - name: str - description: str # the check descriptions, will be put into the status table - match_func: Callable[[str], bool] # the function to check vs the commit status - - def __hash__(self) -> int: - return hash(self.name + self.description) - - -CHECK_DESCRIPTIONS = [ - 
CheckDescription( - "PR Check", - "Checks correctness of the PR's body", - lambda x: x == "PR Check", - ), - CheckDescription( - StatusNames.SYNC, - "If it fails, ask a maintainer for help", - lambda x: x == StatusNames.SYNC, - ), - CheckDescription( - "AST fuzzer", - "Runs randomly generated queries to catch program errors. " - "The build type is optionally given in parenthesis. " - "If it fails, ask a maintainer for help", - lambda x: x.startswith("AST fuzzer"), - ), - CheckDescription( - JobNames.BUGFIX_VALIDATE, - "Checks that either a new test (functional or integration) or there " - "some changed tests that fail with the binary built on master branch", - lambda x: x == JobNames.BUGFIX_VALIDATE, - ), - CheckDescription( - "CI running", - "A meta-check that indicates the running CI. Normally, it's in success or " - "pending state. The failed status indicates some problems with the PR", - lambda x: x == "CI running", - ), - CheckDescription( - "ClickHouse build check", - "Builds ClickHouse in various configurations for use in further steps. " - "You have to fix the builds that fail. Build logs often has enough " - "information to fix the error, but you might have to reproduce the failure " - "locally. The cmake options can be found in the build log, grepping for " - 'cmake. Use these options and follow the general build process', - lambda x: x.startswith("ClickHouse") and x.endswith("build check"), - ), - CheckDescription( - "Compatibility check", - "Checks that clickhouse binary runs on distributions with old libc " - "versions. If it fails, ask a maintainer for help", - lambda x: x.startswith("Compatibility check"), - ), - CheckDescription( - JobNames.DOCKER_SERVER, - "The check to build and optionally push the mentioned image to docker hub", - lambda x: x.startswith("Docker server"), - ), - CheckDescription( - JobNames.DOCKER_KEEPER, - "The check to build and optionally push the mentioned image to docker hub", - lambda x: x.startswith("Docker keeper"), - ), - CheckDescription( - JobNames.DOCS_CHECK, - "Builds and tests the documentation", - lambda x: x == JobNames.DOCS_CHECK, - ), - CheckDescription( - JobNames.FAST_TEST, - "Normally this is the first check that is ran for a PR. It builds ClickHouse " - 'and runs most of stateless functional tests, ' - "omitting some. If it fails, further checks are not started until it is fixed. " - "Look at the report to see which tests fail, then reproduce the failure " - 'locally as described here', - lambda x: x == JobNames.FAST_TEST, - ), - CheckDescription( - "Flaky tests", - "Checks if new added or modified tests are flaky by running them repeatedly, " - "in parallel, with more randomization. Functional tests are run 100 times " - "with address sanitizer, and additional randomization of thread scheduling. " - "Integration tests are run up to 10 times. If at least once a new test has " - "failed, or was too long, this check will be red. We don't allow flaky tests, " - 'read the doc', - lambda x: "tests flaky check" in x, - ), - CheckDescription( - "Install packages", - "Checks that the built packages are installable in a clear environment", - lambda x: x.startswith("Install packages ("), - ), - CheckDescription( - "Integration tests", - "The integration tests report. 
In parenthesis the package type is given, " - "and in square brackets are the optional part/total tests", - lambda x: x.startswith("Integration tests ("), - ), - CheckDescription( - StatusNames.MERGEABLE, - "Checks if all other necessary checks are successful", - lambda x: x == StatusNames.MERGEABLE, - ), - CheckDescription( - "Performance Comparison", - "Measure changes in query performance. The performance test report is " - 'described in detail here. ' - "In square brackets are the optional part/total tests", - lambda x: x.startswith("Performance Comparison"), - ), - CheckDescription( - "Push to Dockerhub", - "The check for building and pushing the CI related docker images to docker hub", - lambda x: x.startswith("Push") and "to Dockerhub" in x, - ), - CheckDescription( - "Sqllogic", - "Run clickhouse on the " - 'sqllogic ' - "test set against sqlite and checks that all statements are passed", - lambda x: x.startswith("Sqllogic test"), - ), - CheckDescription( - "SQLancer", - "Fuzzing tests that detect logical bugs with " - 'SQLancer tool', - lambda x: x.startswith("SQLancer"), - ), - CheckDescription( - "Stateful tests", - "Runs stateful functional tests for ClickHouse binaries built in various " - "configurations -- release, debug, with sanitizers, etc", - lambda x: x.startswith("Stateful tests ("), - ), - CheckDescription( - "Stateless tests", - "Runs stateless functional tests for ClickHouse binaries built in various " - "configurations -- release, debug, with sanitizers, etc", - lambda x: x.startswith("Stateless tests ("), - ), - CheckDescription( - "Stress test", - "Runs stateless functional tests concurrently from several clients to detect " - "concurrency-related errors", - lambda x: x.startswith("Stress test ("), - ), - CheckDescription( - JobNames.STYLE_CHECK, - "Runs a set of checks to keep the code style clean. If some of tests failed, " - "see the related log from the report", - lambda x: x == JobNames.STYLE_CHECK, - ), - CheckDescription( - "Unit tests", - "Runs the unit tests for different release types", - lambda x: x.startswith("Unit tests ("), - ), - CheckDescription( - "Upgrade check", - "Runs stress tests on server version from last release and then tries to " - "upgrade it to the version from the PR. 
It checks if the new server can " - "successfully startup without any errors, crashes or sanitizer asserts", - lambda x: x.startswith("Upgrade check ("), - ), - CheckDescription( - "ClickBench", - "Runs [ClickBench](https://github.com/ClickHouse/ClickBench/) with instant-attach table", - lambda x: x.startswith("ClickBench"), - ), - CheckDescription( - "Fallback for unknown", - "There's no description for the check yet, please add it to " - "tests/ci/ci_config.py:CHECK_DESCRIPTIONS", - lambda x: True, - ), -] - - -def main() -> None: +if __name__ == "__main__": parser = ArgumentParser( formatter_class=ArgumentDefaultsHelpFormatter, description="The script provides build config for GITHUB_ENV or shell export", @@ -1617,10 +671,9 @@ def main() -> None: help="if set, the ENV parameters are provided for shell export", ) args = parser.parse_args() - build_config = CI_CONFIG.build_config.get(args.build_name) - if build_config: - print(build_config.export_env(args.export)) - - -if __name__ == "__main__": - main() + assert ( + args.build_name in CI.JOB_CONFIGS + ), f"Build name [{args.build_name}] is not valid" + build_config = CI.JOB_CONFIGS[args.build_name].build_config + assert build_config, "--export must not be used for non-build jobs" + print(build_config.export_env(args.export)) diff --git a/tests/ci/ci_definitions.py b/tests/ci/ci_definitions.py new file mode 100644 index 00000000000..fdd5dc7a671 --- /dev/null +++ b/tests/ci/ci_definitions.py @@ -0,0 +1,781 @@ +import copy +from dataclasses import dataclass, field +from pathlib import Path +from typing import Callable, List, Union, Iterable, Optional, Literal, Any + +from ci_utils import WithIter +from integration_test_images import IMAGES + + +class WorkflowStages(metaclass=WithIter): + """ + Stages of the GitHub Actions workflow + """ + + # for jobs that do not belong to any stage, e.g. Build Report Check + NA = "UNKNOWN" + # normal builds (builds that are required for further testing) + BUILDS_1 = "Builds_1" + # special builds + BUILDS_2 = "Builds_2" + # all tests required for merge + TESTS_1 = "Tests_1" + # not used atm + TESTS_2 = "Tests_2" + # all tests not required for merge + TESTS_3 = "Tests_3" + + +class Runners(metaclass=WithIter): + """ + GitHub runners' labels + """ + + BUILDER = "builder" + STYLE_CHECKER = "style-checker" + STYLE_CHECKER_ARM = "style-checker-aarch64" + FUNC_TESTER = "func-tester" + FUNC_TESTER_ARM = "func-tester-aarch64" + STRESS_TESTER = "stress-tester" + FUZZER_UNIT_TESTER = "fuzzer-unit-tester"
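# A quick sketch of what WithIter plausibly provides (its implementation lives in
# ci_utils and is not shown in this patch): iterating the class yields the values of
# its public, non-callable attributes, which is what makes membership tests like
# `job in JobNames` and loops like `for job in Runners` work throughout this file.
class WithIterSketch(type):
    def __iter__(cls):
        return iter(
            v for k, v in vars(cls).items() if not k.startswith("_") and not callable(v)
        )


class RunnersSketch(metaclass=WithIterSketch):
    BUILDER = "builder"
    STYLE_CHECKER = "style-checker"


assert "builder" in RunnersSketch  # `in` falls back to __iter__ on the metaclass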
+ + +class Tags(metaclass=WithIter): + """ + CI customization tags (set via the PR body; some can also be set as GH labels, e.g. libFuzzer) + """ + + DO_NOT_TEST_LABEL = "do_not_test" + NO_MERGE_COMMIT = "no_merge_commit" + NO_CI_CACHE = "no_ci_cache" + # to upload all binaries from build jobs + UPLOAD_ALL_ARTIFACTS = "upload_all" + CI_SET_SYNC = "ci_set_sync" + CI_SET_ARM = "ci_set_arm" + CI_SET_REQUIRED = "ci_set_required" + CI_SET_BUILDS = "ci_set_builds" + CI_SET_NON_REQUIRED = "ci_set_non_required" + CI_SET_OLD_ANALYZER = "ci_set_old_analyzer" + + libFuzzer = "libFuzzer" + + +class BuildNames(metaclass=WithIter): + """ + Build jobs' names + """ + + PACKAGE_RELEASE = "package_release" + PACKAGE_AARCH64 = "package_aarch64" + PACKAGE_ASAN = "package_asan" + PACKAGE_UBSAN = "package_ubsan" + PACKAGE_TSAN = "package_tsan" + PACKAGE_MSAN = "package_msan" + PACKAGE_DEBUG = "package_debug" + PACKAGE_RELEASE_COVERAGE = "package_release_coverage" + BINARY_RELEASE = "binary_release" + BINARY_TIDY = "binary_tidy" + BINARY_DARWIN = "binary_darwin" + BINARY_AARCH64 = "binary_aarch64" + BINARY_AARCH64_V80COMPAT = "binary_aarch64_v80compat" + BINARY_FREEBSD = "binary_freebsd" + BINARY_DARWIN_AARCH64 = "binary_darwin_aarch64" + BINARY_PPC64LE = "binary_ppc64le" + BINARY_AMD64_COMPAT = "binary_amd64_compat" + BINARY_AMD64_MUSL = "binary_amd64_musl" + BINARY_RISCV64 = "binary_riscv64" + BINARY_S390X = "binary_s390x" + BINARY_LOONGARCH64 = "binary_loongarch64" + FUZZERS = "fuzzers" + + +class JobNames(metaclass=WithIter): + """ + All CI non-build jobs (build job names are concatenated to this class via a Python hack below) + """ + + STYLE_CHECK = "Style check" + FAST_TEST = "Fast test" + DOCKER_SERVER = "Docker server image" + DOCKER_KEEPER = "Docker keeper image" + INSTALL_TEST_AMD = "Install packages (release)" + INSTALL_TEST_ARM = "Install packages (aarch64)" + + STATELESS_TEST_DEBUG = "Stateless tests (debug)" + STATELESS_TEST_RELEASE = "Stateless tests (release)" + STATELESS_TEST_RELEASE_COVERAGE = "Stateless tests (coverage)" + STATELESS_TEST_AARCH64 = "Stateless tests (aarch64)" + STATELESS_TEST_ASAN = "Stateless tests (asan)" + STATELESS_TEST_TSAN = "Stateless tests (tsan)" + STATELESS_TEST_MSAN = "Stateless tests (msan)" + STATELESS_TEST_UBSAN = "Stateless tests (ubsan)" + STATELESS_TEST_OLD_ANALYZER_S3_REPLICATED_RELEASE = ( + "Stateless tests (release, old analyzer, s3, DatabaseReplicated)" + ) + STATELESS_TEST_S3_DEBUG = "Stateless tests (debug, s3 storage)" + STATELESS_TEST_S3_TSAN = "Stateless tests (tsan, s3 storage)" + STATELESS_TEST_AZURE_ASAN = "Stateless tests (azure, asan)" + STATELESS_TEST_FLAKY_ASAN = "Stateless tests flaky check (asan)" + + STATEFUL_TEST_DEBUG = "Stateful tests (debug)" + STATEFUL_TEST_RELEASE = "Stateful tests (release)" + STATEFUL_TEST_RELEASE_COVERAGE = "Stateful tests (coverage)" + STATEFUL_TEST_AARCH64 = "Stateful tests (aarch64)" + STATEFUL_TEST_ASAN = "Stateful tests (asan)" + STATEFUL_TEST_TSAN = "Stateful tests (tsan)" + STATEFUL_TEST_MSAN = "Stateful tests (msan)" + STATEFUL_TEST_UBSAN = "Stateful tests (ubsan)" + STATEFUL_TEST_PARALLEL_REPL_RELEASE = "Stateful tests (release, ParallelReplicas)" + STATEFUL_TEST_PARALLEL_REPL_DEBUG = "Stateful tests (debug, ParallelReplicas)" + STATEFUL_TEST_PARALLEL_REPL_ASAN = "Stateful tests (asan, ParallelReplicas)" + STATEFUL_TEST_PARALLEL_REPL_MSAN = "Stateful tests (msan, ParallelReplicas)" + STATEFUL_TEST_PARALLEL_REPL_UBSAN = "Stateful tests (ubsan, ParallelReplicas)" + STATEFUL_TEST_PARALLEL_REPL_TSAN = "Stateful tests (tsan, ParallelReplicas)" + + STRESS_TEST_ASAN = "Stress test (asan)" + STRESS_TEST_TSAN = "Stress test (tsan)" + STRESS_TEST_UBSAN =
"Stress test (ubsan)" + STRESS_TEST_MSAN = "Stress test (msan)" + STRESS_TEST_DEBUG = "Stress test (debug)" + STRESS_TEST_AZURE_TSAN = "Stress test (azure, tsan)" + STRESS_TEST_AZURE_MSAN = "Stress test (azure, msan)" + + INTEGRATION_TEST = "Integration tests (release)" + INTEGRATION_TEST_ASAN = "Integration tests (asan)" + INTEGRATION_TEST_ASAN_OLD_ANALYZER = "Integration tests (asan, old analyzer)" + INTEGRATION_TEST_TSAN = "Integration tests (tsan)" + INTEGRATION_TEST_ARM = "Integration tests (aarch64)" + INTEGRATION_TEST_FLAKY = "Integration tests flaky check (asan)" + + UPGRADE_TEST_DEBUG = "Upgrade check (debug)" + UPGRADE_TEST_ASAN = "Upgrade check (asan)" + UPGRADE_TEST_TSAN = "Upgrade check (tsan)" + UPGRADE_TEST_MSAN = "Upgrade check (msan)" + + UNIT_TEST = "Unit tests (release)" + UNIT_TEST_ASAN = "Unit tests (asan)" + UNIT_TEST_MSAN = "Unit tests (msan)" + UNIT_TEST_TSAN = "Unit tests (tsan)" + UNIT_TEST_UBSAN = "Unit tests (ubsan)" + + AST_FUZZER_TEST_DEBUG = "AST fuzzer (debug)" + AST_FUZZER_TEST_ASAN = "AST fuzzer (asan)" + AST_FUZZER_TEST_MSAN = "AST fuzzer (msan)" + AST_FUZZER_TEST_TSAN = "AST fuzzer (tsan)" + AST_FUZZER_TEST_UBSAN = "AST fuzzer (ubsan)" + + JEPSEN_KEEPER = "ClickHouse Keeper Jepsen" + JEPSEN_SERVER = "ClickHouse Server Jepsen" + + PERFORMANCE_TEST_AMD64 = "Performance Comparison (release)" + PERFORMANCE_TEST_ARM64 = "Performance Comparison (aarch64)" + + SQL_LOGIC_TEST = "Sqllogic test (release)" + + SQLANCER = "SQLancer (release)" + SQLANCER_DEBUG = "SQLancer (debug)" + SQLTEST = "SQLTest" + + COMPATIBILITY_TEST = "Compatibility check (release)" + COMPATIBILITY_TEST_ARM = "Compatibility check (aarch64)" + + CLICKBENCH_TEST = "ClickBench (release)" + CLICKBENCH_TEST_ARM = "ClickBench (aarch64)" + + LIBFUZZER_TEST = "libFuzzer tests" + + BUILD_CHECK = "ClickHouse build check" + # BUILD_CHECK_SPECIAL = "ClickHouse special build check" + + DOCS_CHECK = "Docs check" + BUGFIX_VALIDATE = "Bugfix validation" + + +# hack to concatenate Build and non-build jobs under JobNames class +for attr_name in dir(BuildNames): + if not attr_name.startswith("__") and not callable(getattr(BuildNames, attr_name)): + setattr(JobNames, attr_name, getattr(BuildNames, attr_name)) + + +class StatusNames(metaclass=WithIter): + """ + Class with statuses that aren't related to particular jobs + """ + + # overall CI report + CI = "CI running" + # mergeable status + MERGEABLE = "Mergeable Check" + # status of a sync pr + SYNC = "A Sync" + # PR formatting check status + PR_CHECK = "PR Check" + + +class SyncState(metaclass=WithIter): + PENDING = "awaiting merge" + MERGE_FAILED = "merge failed" + TESTING = "awaiting test results" + TESTS_FAILED = "tests failed" + COMPLETED = "completed" + + +@dataclass +class DigestConfig: + # all files, dirs to include into digest, glob supported + include_paths: List[Union[str, Path]] = field(default_factory=list) + # file suffixes to exclude from digest + exclude_files: List[str] = field(default_factory=list) + # directories to exclude from digest + exclude_dirs: List[Union[str, Path]] = field(default_factory=list) + # docker names to include into digest + docker: List[str] = field(default_factory=list) + # git submodules digest + git_submodules: bool = False + + +@dataclass +class LabelConfig: + """ + configures different CI scenarios per CI Tag/GH label + """ + + run_jobs: Iterable[str] = frozenset() + + +@dataclass +class BuildConfig: + name: str + compiler: str + package_type: Literal["deb", "binary", "fuzzers"] + additional_pkgs: bool = False + 
+ + +class StatusNames(metaclass=WithIter): + """ + Class with statuses that aren't related to particular jobs + """ + + # overall CI report + CI = "CI running" + # mergeable status + MERGEABLE = "Mergeable Check" + # status of a sync pr + SYNC = "A Sync" + # PR formatting check status + PR_CHECK = "PR Check" + + +class SyncState(metaclass=WithIter): + PENDING = "awaiting merge" + MERGE_FAILED = "merge failed" + TESTING = "awaiting test results" + TESTS_FAILED = "tests failed" + COMPLETED = "completed" + + +@dataclass +class DigestConfig: + # all files, dirs to include into digest, glob supported + include_paths: List[Union[str, Path]] = field(default_factory=list) + # file suffixes to exclude from digest + exclude_files: List[str] = field(default_factory=list) + # directories to exclude from digest + exclude_dirs: List[Union[str, Path]] = field(default_factory=list) + # docker names to include into digest + docker: List[str] = field(default_factory=list) + # git submodules digest + git_submodules: bool = False + + +@dataclass +class LabelConfig: + """ + configures different CI scenarios per CI Tag/GH label + """ + + run_jobs: Iterable[str] = frozenset() + + +@dataclass +class BuildConfig: + name: str + compiler: str + package_type: Literal["deb", "binary", "fuzzers"] + additional_pkgs: bool = False + debug_build: bool = False + coverage: bool = False + sanitizer: str = "" + tidy: bool = False + # sparse_checkout is needed only to test the option itself. + # No particular sense to use it in every build, since it slows down the job. + sparse_checkout: bool = False + comment: str = "" + static_binary_name: str = "" + + def export_env(self, export: bool = False) -> str: + def process(field_name: str, field: Union[bool, str]) -> str: + if isinstance(field, bool): + field = str(field).lower() + elif not isinstance(field, str): + field = "" + if export: + return f"export BUILD_{field_name.upper()}={repr(field)}" + return f"BUILD_{field_name.upper()}={field}" + + return "\n".join(process(k, v) for k, v in self.__dict__.items())
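# How export_env() renders a config (a sketch; the compiler value here is illustrative
# only): booleans are lowercased, every field becomes a BUILD_<FIELD>=<value> line, and
# export=True prefixes each line with `export` and repr()-quotes the value.
from ci_definitions import BuildConfig

cfg = BuildConfig(name="package_release", compiler="clang-18", package_type="deb")
print(cfg.export_env(export=True))
# export BUILD_NAME='package_release'
# export BUILD_COMPILER='clang-18'
# export BUILD_PACKAGE_TYPE='deb'
# export BUILD_ADDITIONAL_PKGS='false'
# ... and so on for the remaining fields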
+ + +@dataclass +class JobConfig: + """ + contains config parameters for job execution in the CI workflow + """ + + # GH Runner type (tag from @Runners) + runner_type: str + # used for config validation in ci unittests + job_name_keyword: str = "" + # builds required for the job (applicable for test jobs) + required_builds: Optional[List[str]] = None + # build config for the build job (applicable for builds) + build_config: Optional[BuildConfig] = None + # configures digest calculation for the job + digest: DigestConfig = field(default_factory=DigestConfig) + # command that will be run for the job if it is omitted in the CI workflow yml + run_command: str = "" + # job timeout, seconds + timeout: Optional[int] = None + # sets number of batches for a multi-batch job + num_batches: int = 1 + # label that enables the job in CI; if set, the digest isn't used + run_by_label: str = "" + # to run always regardless of the job digest and/or label + run_always: bool = False + # if the job needs to be run on the release branch, including master (building packages, docker server). + # NOTE: Subsequent runs on the same branch with the same digest are still considered skippable. + required_on_release_branch: bool = False + # job is for pr workflow only + pr_only: bool = False + # job is for release/master branches only + release_only: bool = False + # to randomly pick and run one job among jobs in the same @random_bucket (PR branches only). + random_bucket: str = "" + # Do not set it. A list of batches to run. It will be set at runtime in accordance with ci cache and ci settings + batches: Optional[List[int]] = None + # Do not set it. A list of batches to await. It will be set at runtime in accordance with ci cache and ci settings + pending_batches: Optional[List[int]] = None + + def with_properties(self, **kwargs: Any) -> "JobConfig": + res = copy.deepcopy(self) + for k, v in kwargs.items(): + assert hasattr(self, k), f"Setting invalid attribute [{k}]" + setattr(res, k, v) + return res + + def get_required_build(self) -> str: + assert self.required_builds + return self.required_builds[0]
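# with_properties() is the copy-on-derive helper for the job templates defined below:
# it deep-copies the config and overrides only attributes that already exist (the
# hasattr assert catches typos). A usage sketch, not taken from the patch:
from ci_definitions import CommonJobConfigs

tweaked = CommonJobConfigs.STATELESS_TEST.with_properties(timeout=3600, num_batches=2)
assert tweaked.timeout == 3600
assert CommonJobConfigs.STATELESS_TEST.timeout == 10800  # the template is untouched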
+ include_paths=["./tests/ci/unit_tests_check.py"], + exclude_files=[".md"], + docker=["clickhouse/unit-test"], + ), + run_command="unit_tests_check.py", + runner_type=Runners.FUZZER_UNIT_TESTER, + ) + PERF_TESTS = JobConfig( + job_name_keyword="performance", + digest=DigestConfig( + include_paths=[ + "./tests/ci/performance_comparison_check.py", + "./tests/performance/", + ], + exclude_files=[".md"], + docker=["clickhouse/performance-comparison"], + ), + run_command="performance_comparison_check.py", + runner_type=Runners.STRESS_TESTER, + ) + SQLLANCER_TEST = JobConfig( + job_name_keyword="lancer", + digest=DigestConfig(), + run_command="sqlancer_check.py", + release_only=True, + run_always=True, + runner_type=Runners.FUZZER_UNIT_TESTER, + ) + SQLLOGIC_TEST = JobConfig( + job_name_keyword="logic", + digest=DigestConfig( + include_paths=["./tests/ci/sqllogic_test.py"], + exclude_files=[".md"], + docker=["clickhouse/sqllogic-test"], + ), + run_command="sqllogic_test.py", + timeout=10800, + release_only=True, + runner_type=Runners.STYLE_CHECKER, + ) + SQL_TEST = JobConfig( + job_name_keyword="sqltest", + digest=DigestConfig( + include_paths=["./tests/ci/sqltest.py"], + exclude_files=[".md"], + docker=["clickhouse/sqltest"], + ), + run_command="sqltest.py", + timeout=10800, + release_only=True, + runner_type=Runners.FUZZER_UNIT_TESTER, + ) + BUGFIX_TEST = JobConfig( + job_name_keyword="bugfix", + digest=DigestConfig(), + run_command="bugfix_validate_check.py", + timeout=900, + runner_type=Runners.FUNC_TESTER, + ) + DOCKER_SERVER = JobConfig( + job_name_keyword="docker", + required_on_release_branch=True, + run_command='docker_server.py --check-name "$CHECK_NAME" --release-type head --allow-build-reuse', + digest=DigestConfig( + include_paths=[ + "tests/ci/docker_server.py", + "./docker/server", + ] + ), + runner_type=Runners.STYLE_CHECKER, + ) + CLICKBENCH_TEST = JobConfig( + job_name_keyword="clickbench", + digest=DigestConfig( + include_paths=[ + "tests/ci/clickbench.py", + ], + docker=["clickhouse/clickbench"], + ), + run_command='clickbench.py "$CHECK_NAME"', + timeout=900, + runner_type=Runners.FUNC_TESTER, + ) + BUILD = JobConfig( + required_on_release_branch=True, + digest=DigestConfig( + include_paths=[ + "./src", + "./contrib/*-cmake", + "./contrib/consistent-hashing", + "./contrib/murmurhash", + "./contrib/libfarmhash", + "./contrib/pdqsort", + "./contrib/cityhash102", + "./contrib/sparse-checkout", + "./contrib/libmetrohash", + "./contrib/update-submodules.sh", + "./contrib/CMakeLists.txt", + "./CMakeLists.txt", + "./PreLoad.cmake", + "./cmake", + "./base", + "./programs", + "./packages", + "./docker/packager/packager", + "./rust", + "./tests/ci/version_helper.py", + # FIXME: This is a WA to rebuild the CH and recreate the Performance.tar.zst artifact + # when there are changes in performance test scripts. 
+ + +REQUIRED_CHECKS = [ + StatusNames.PR_CHECK, + StatusNames.SYNC, + JobNames.BUILD_CHECK, + JobNames.DOCS_CHECK, + JobNames.FAST_TEST, + JobNames.STATEFUL_TEST_RELEASE, + JobNames.STATELESS_TEST_RELEASE, + JobNames.STATELESS_TEST_ASAN, + JobNames.STATELESS_TEST_FLAKY_ASAN, + JobNames.STATEFUL_TEST_ASAN, + JobNames.STYLE_CHECK, + JobNames.UNIT_TEST_ASAN, + JobNames.UNIT_TEST_MSAN, + JobNames.UNIT_TEST, + JobNames.UNIT_TEST_TSAN, + JobNames.UNIT_TEST_UBSAN, + JobNames.INTEGRATION_TEST_ASAN_OLD_ANALYZER, + JobNames.STATELESS_TEST_OLD_ANALYZER_S3_REPLICATED_RELEASE, +] + +# Jobs that run in Merge Queue if it's enabled +MQ_JOBS = [ + JobNames.STYLE_CHECK, + JobNames.FAST_TEST, + BuildNames.BINARY_RELEASE, + JobNames.UNIT_TEST, +] + + +@dataclass +class CheckDescription: + name: str + description: str # the check description, will be put into the status table + match_func: Callable[[str], bool] # the function to check vs the commit status + + def __hash__(self) -> int: + return hash(self.name + self.description) + + +CHECK_DESCRIPTIONS = [ + CheckDescription( + StatusNames.PR_CHECK, + "Checks correctness of the PR's body", + lambda x: x == "PR Check", + ), + CheckDescription( + StatusNames.SYNC, + "If it fails, ask a maintainer for help", + lambda x: x == StatusNames.SYNC, + ), + CheckDescription( + "AST fuzzer", + "Runs randomly generated queries to catch program errors. " + "The build type is optionally given in parentheses. " + "If it fails, ask a maintainer for help", + lambda x: x.startswith("AST fuzzer"), + ), + CheckDescription( + JobNames.BUGFIX_VALIDATE, + "Checks that there is either a new test (functional or integration) or " + "some changed tests that fail with the binary built on the master branch", + lambda x: x == JobNames.BUGFIX_VALIDATE, + ), + CheckDescription( + StatusNames.CI, + "A meta-check that indicates the running CI. Normally, it's in success or " + "pending state. The failed status indicates some problems with the PR", + lambda x: x == "CI running", + ), + CheckDescription( + "ClickHouse build check", + "Builds ClickHouse in various configurations for use in further steps. " + "You have to fix the builds that fail. Build logs often have enough " + "information to fix the error, but you might have to reproduce the failure " + "locally. The cmake options can be found in the build log, grepping for " + 'cmake. Use these options and follow the general build process', + lambda x: x.startswith("ClickHouse") and x.endswith("build check"), + ),
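# CHECK_DESCRIPTIONS is resolved first-match-wins with the catch-all entry last; the
# commit_status_helper.py hunk below walks it exactly like this sketch to map a raw
# status context to a description:
from ci_config import CI

def describe(context: str) -> str:
    for cd in CI.CHECK_DESCRIPTIONS:
        if cd.match_func(context):
            return cd.description
    return ""  # unreachable: the fallback entry matches everything

print(describe("Stateless tests (asan) [2/4]"))  # -> the "Stateless tests" description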
+ CheckDescription( + "Compatibility check", + "Checks that the clickhouse binary runs on distributions with old libc " + "versions. If it fails, ask a maintainer for help", + lambda x: x.startswith("Compatibility check"), + ), + CheckDescription( + JobNames.DOCKER_SERVER, + "The check to build and optionally push the mentioned image to docker hub", + lambda x: x.startswith("Docker server"), + ), + CheckDescription( + JobNames.DOCKER_KEEPER, + "The check to build and optionally push the mentioned image to docker hub", + lambda x: x.startswith("Docker keeper"), + ), + CheckDescription( + JobNames.DOCS_CHECK, + "Builds and tests the documentation", + lambda x: x == JobNames.DOCS_CHECK, + ), + CheckDescription( + JobNames.FAST_TEST, + "Normally this is the first check that is run for a PR. It builds ClickHouse " + 'and runs most of the stateless functional tests, ' + "omitting some. If it fails, further checks are not started until it is fixed. " + "Look at the report to see which tests fail, then reproduce the failure " + 'locally as described here', + lambda x: x == JobNames.FAST_TEST, + ), + CheckDescription( + "Flaky tests", + "Checks if newly added or modified tests are flaky by running them repeatedly, " + "in parallel, with more randomization. Functional tests are run 100 times " + "with address sanitizer, and additional randomization of thread scheduling. " + "Integration tests are run up to 10 times. If a new test fails at least once, " + "or runs for too long, this check will be red. We don't allow flaky tests, " + 'read the doc', + lambda x: "tests flaky check" in x, + ), + CheckDescription( + "Install packages", + "Checks that the built packages are installable in a clean environment", + lambda x: x.startswith("Install packages ("), + ), + CheckDescription( + "Integration tests", + "The integration tests report. In parentheses the package type is given, " + "and in square brackets are the optional part/total tests", + lambda x: x.startswith("Integration tests ("), + ), + CheckDescription( + StatusNames.MERGEABLE, + "Checks if all other necessary checks are successful", + lambda x: x == StatusNames.MERGEABLE, + ), + CheckDescription( + "Performance Comparison", + "Measures changes in query performance. The performance test report is " + 'described in detail here.
' + "In square brackets are the optional part/total tests", + lambda x: x.startswith("Performance Comparison"), + ), + CheckDescription( + "Push to Dockerhub", + "The check for building and pushing the CI related docker images to docker hub", + lambda x: x.startswith("Push") and "to Dockerhub" in x, + ), + CheckDescription( + "Sqllogic", + "Run clickhouse on the " + 'sqllogic ' + "test set against sqlite and checks that all statements are passed", + lambda x: x.startswith("Sqllogic test"), + ), + CheckDescription( + "SQLancer", + "Fuzzing tests that detect logical bugs with " + 'SQLancer tool', + lambda x: x.startswith("SQLancer"), + ), + CheckDescription( + "Stateful tests", + "Runs stateful functional tests for ClickHouse binaries built in various " + "configurations -- release, debug, with sanitizers, etc", + lambda x: x.startswith("Stateful tests ("), + ), + CheckDescription( + "Stateless tests", + "Runs stateless functional tests for ClickHouse binaries built in various " + "configurations -- release, debug, with sanitizers, etc", + lambda x: x.startswith("Stateless tests ("), + ), + CheckDescription( + "Stress test", + "Runs stateless functional tests concurrently from several clients to detect " + "concurrency-related errors", + lambda x: x.startswith("Stress test ("), + ), + CheckDescription( + JobNames.STYLE_CHECK, + "Runs a set of checks to keep the code style clean. If some of tests failed, " + "see the related log from the report", + lambda x: x == JobNames.STYLE_CHECK, + ), + CheckDescription( + "Unit tests", + "Runs the unit tests for different release types", + lambda x: x.startswith("Unit tests ("), + ), + CheckDescription( + "Upgrade check", + "Runs stress tests on server version from last release and then tries to " + "upgrade it to the version from the PR. 
It checks if the new server can " + "start up successfully without any errors, crashes, or sanitizer asserts", + lambda x: x.startswith("Upgrade check ("), + ), + CheckDescription( + "ClickBench", + "Runs [ClickBench](https://github.com/ClickHouse/ClickBench/) with an instant-attach table", + lambda x: x.startswith("ClickBench"), + ), + CheckDescription( + "Fallback for unknown", + "There's no description for the check yet, please add it to " + "tests/ci/ci_definitions.py:CHECK_DESCRIPTIONS", + lambda x: True, + ), +] diff --git a/tests/ci/ci_settings.py b/tests/ci/ci_settings.py index f25344c7701..a36fcf953ae 100644 --- a/tests/ci/ci_settings.py +++ b/tests/ci/ci_settings.py @@ -3,7 +3,7 @@ from dataclasses import dataclass, asdict from typing import Optional, List, Dict, Any, Iterable from ci_utils import normalize_string -from ci_config import CILabels, CI_CONFIG, JobConfig, JobNames +from ci_config import CI from git_helper import Runner as GitRunner, GIT_PREFIX from pr_info import PRInfo @@ -80,7 +80,7 @@ class CiSettings: if not res.ci_jobs: res.ci_jobs = [] res.ci_jobs.append(match.removeprefix("job_")) - elif match.startswith("ci_set_") and match in CILabels: + elif match.startswith("ci_set_") and match in CI.Tags: if not res.ci_sets: res.ci_sets = [] res.ci_sets.append(match) @@ -97,15 +97,15 @@ class CiSettings: res.exclude_keywords += [ normalize_string(keyword) for keyword in keywords ] - elif match == CILabels.NO_CI_CACHE: + elif match == CI.Tags.NO_CI_CACHE: res.no_ci_cache = True print("NOTE: CI Cache will be disabled") - elif match == CILabels.UPLOAD_ALL_ARTIFACTS: + elif match == CI.Tags.UPLOAD_ALL_ARTIFACTS: res.upload_all = True print("NOTE: All binary artifacts will be uploaded") - elif match == CILabels.DO_NOT_TEST_LABEL: + elif match == CI.Tags.DO_NOT_TEST_LABEL: res.do_not_test = True - elif match == CILabels.NO_MERGE_COMMIT: + elif match == CI.Tags.NO_MERGE_COMMIT: res.no_merge_commit = True print("NOTE: Merge Commit will be disabled") elif match.startswith("batch_"): @@ -131,18 +131,18 @@ class CiSettings: def _check_if_selected( self, job: str, - job_config: JobConfig, + job_config: CI.JobConfig, is_release: bool, is_pr: bool, is_mq: bool, labels: Iterable[str], ) -> bool: # type: ignore #too-many-return-statements if self.do_not_test: - label_config = CI_CONFIG.get_label_config(CILabels.DO_NOT_TEST_LABEL) - assert label_config, f"Unknown tag [{CILabels.DO_NOT_TEST_LABEL}]" + label_config = CI.get_tag_config(CI.Tags.DO_NOT_TEST_LABEL) + assert label_config, f"Unknown tag [{CI.Tags.DO_NOT_TEST_LABEL}]" if job in label_config.run_jobs: print( - f"Job [{job}] present in CI set [{CILabels.DO_NOT_TEST_LABEL}] - pass" + f"Job [{job}] present in CI set [{CI.Tags.DO_NOT_TEST_LABEL}] - pass" ) return True return False @@ -164,7 +164,7 @@ class CiSettings: to_deny = False if self.include_keywords: - if job == JobNames.STYLE_CHECK: + if job == CI.JobNames.STYLE_CHECK: # never exclude Style Check by include keywords return True for keyword in self.include_keywords: @@ -175,7 +175,7 @@ class CiSettings: if self.ci_sets: for tag in self.ci_sets: - label_config = CI_CONFIG.get_label_config(tag) + label_config = CI.get_tag_config(tag) assert label_config, f"Unknown tag [{tag}]" if job in label_config.run_jobs: print(f"Job [{job}] present in CI set [{tag}] - pass") @@ -197,12 +197,12 @@ class CiSettings: def apply( self, - job_configs: Dict[str, JobConfig], + job_configs: Dict[str, CI.JobConfig], is_release: bool, is_pr: bool, is_mq: bool, labels: Iterable[str], - ) -> Dict[str, JobConfig]:
+ ) -> Dict[str, CI.JobConfig]: """ Apply CI settings from pr body """ @@ -220,7 +220,7 @@ class CiSettings: add_parents = [] for job in list(res): - parent_jobs = CI_CONFIG.get_job_parents(job) + parent_jobs = CI.get_job_parents(job) for parent_job in parent_jobs: if parent_job not in res: add_parents.append(parent_job) diff --git a/tests/ci/commit_status_helper.py b/tests/ci/commit_status_helper.py index a0d6495452f..96cf700ed77 100644 --- a/tests/ci/commit_status_helper.py +++ b/tests/ci/commit_status_helper.py @@ -17,7 +17,7 @@ from github.GithubObject import NotSet from github.IssueComment import IssueComment from github.Repository import Repository -from ci_config import CHECK_DESCRIPTIONS, CheckDescription, StatusNames, CIConfig +from ci_config import CI from env_helper import GITHUB_REPOSITORY, GITHUB_UPSTREAM_REPOSITORY, TEMP_PATH from lambda_shared_package.lambda_shared.pr import Labels from pr_info import PRInfo @@ -160,7 +160,7 @@ def set_status_comment(commit: Commit, pr_info: PRInfo) -> None: if not statuses: return - if not [status for status in statuses if status.context == StatusNames.CI]: + if not [status for status in statuses if status.context == CI.StatusNames.CI]: # This is the case, when some statuses already exist for the check, # but not the StatusNames.CI. We should create it as pending. # W/o pr_info to avoid recursion, and yes, one extra create_ci_report @@ -169,7 +169,7 @@ def set_status_comment(commit: Commit, pr_info: PRInfo) -> None: PENDING, create_ci_report(pr_info, statuses), "The report for running CI", - StatusNames.CI, + CI.StatusNames.CI, ) # We update the report in generate_status_comment function, so do it each @@ -212,20 +212,20 @@ def generate_status_comment(pr_info: PRInfo, statuses: CommitStatuses) -> str: f"\n" ) # group checks by the name to get the worst one per each - grouped_statuses = {} # type: Dict[CheckDescription, CommitStatuses] + grouped_statuses = {} # type: Dict[CI.CheckDescription, CommitStatuses] for status in statuses: cd = None - for c in CHECK_DESCRIPTIONS: + for c in CI.CHECK_DESCRIPTIONS: if c.match_func(status.context): cd = c break - if cd is None or cd == CHECK_DESCRIPTIONS[-1]: + if cd is None or cd == CI.CHECK_DESCRIPTIONS[-1]: # This is the case for either non-found description or a fallback - cd = CheckDescription( + cd = CI.CheckDescription( status.context, - CHECK_DESCRIPTIONS[-1].description, - CHECK_DESCRIPTIONS[-1].match_func, + CI.CHECK_DESCRIPTIONS[-1].description, + CI.CHECK_DESCRIPTIONS[-1].match_func, ) if cd in grouped_statuses: @@ -301,7 +301,7 @@ def create_ci_report(pr_info: PRInfo, statuses: CommitStatuses) -> str: ) ) return upload_results( - S3Helper(), pr_info.number, pr_info.sha, test_results, [], StatusNames.CI + S3Helper(), pr_info.number, pr_info.sha, test_results, [], CI.StatusNames.CI ) @@ -435,7 +435,7 @@ def set_mergeable_check( state, report_url, format_description(description), - StatusNames.MERGEABLE, + CI.StatusNames.MERGEABLE, ) @@ -443,7 +443,7 @@ def update_mergeable_check(commit: Commit, pr_info: PRInfo, check_name: str) -> "check if the check_name in REQUIRED_CHECKS and then trigger update" not_run = ( pr_info.labels.intersection({Labels.SKIP_MERGEABLE_CHECK, Labels.RELEASE}) - or not CIConfig.is_required(check_name) + or not CI.is_required(check_name) or pr_info.release_pr or pr_info.number == 0 ) @@ -465,13 +465,11 @@ def trigger_mergeable_check( workflow_failed: bool = False, ) -> StatusType: """calculate and update StatusNames.MERGEABLE""" - required_checks = [ - status for 
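# CI.is_required() is now the single gate for the Mergeable Check: a status counts
# toward it iff its context belongs to CI.REQUIRED_CHECKS. A sketch of the intended
# behavior (assuming is_required also tolerates batch suffixes like "[1/4]"):
from ci_config import CI

assert CI.is_required(CI.JobNames.STYLE_CHECK)
assert not CI.is_required(CI.JobNames.JEPSEN_KEEPER)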
status in statuses if CIConfig.is_required(status.context) - ] + required_checks = [status for status in statuses if CI.is_required(status.context)] mergeable_status = None for status in statuses: - if status.context == StatusNames.MERGEABLE: + if status.context == CI.StatusNames.MERGEABLE: mergeable_status = status break @@ -548,7 +546,7 @@ def update_upstream_sync_status( "Using commit %s to post the %s status `%s`: [%s]", last_synced_upstream_commit.sha, sync_status, - StatusNames.SYNC, + CI.StatusNames.SYNC, "", ) post_commit_status( @@ -556,7 +554,7 @@ def update_upstream_sync_status( sync_status, "", "", - StatusNames.SYNC, + CI.StatusNames.SYNC, ) trigger_mergeable_check( last_synced_upstream_commit, diff --git a/tests/ci/digest_helper.py b/tests/ci/digest_helper.py index 8d6ec127f6e..4dcfb03c04f 100644 --- a/tests/ci/digest_helper.py +++ b/tests/ci/digest_helper.py @@ -9,10 +9,10 @@ from typing import TYPE_CHECKING, Dict, Iterable, List, Optional, Union from sys import modules from docker_images_helper import get_images_info -from ci_config import DigestConfig from git_helper import Runner from env_helper import ROOT_DIR from ci_utils import cd +from ci_config import CI DOCKER_DIGEST_LEN = 12 JOB_DIGEST_LEN = 10 @@ -139,20 +139,21 @@ class DockerDigester: class JobDigester: - def __init__(self): + def __init__(self, dry_run: bool = False): self.dd = DockerDigester() self.cache: Dict[str, str] = {} + self.dry_run = dry_run @staticmethod - def _get_config_hash(digest_config: DigestConfig) -> str: + def _get_config_hash(digest_config: CI.DigestConfig) -> str: data_dict = asdict(digest_config) hash_obj = md5() hash_obj.update(str(data_dict).encode()) hash_string = hash_obj.hexdigest() return hash_string - def get_job_digest(self, digest_config: DigestConfig) -> str: - if not digest_config.include_paths: + def get_job_digest(self, digest_config: CI.DigestConfig) -> str: + if not digest_config.include_paths or self.dry_run: # job is not for digest return "f" * JOB_DIGEST_LEN diff --git a/tests/ci/download_binary.py b/tests/ci/download_binary.py index 79db1e57d62..b0b5659ca83 100755 --- a/tests/ci/download_binary.py +++ b/tests/ci/download_binary.py @@ -8,7 +8,7 @@ import logging from pathlib import Path from build_download_helper import download_build_with_progress -from ci_config import CI_CONFIG +from ci_config import CI from env_helper import RUNNER_TEMP, S3_ARTIFACT_DOWNLOAD_TEMPLATE from git_helper import Git, commit from version_helper import get_version_from_repo, version_arg @@ -59,7 +59,8 @@ def main(): temp_path.mkdir(parents=True, exist_ok=True) for build in args.build_names: # check if it's in CI_CONFIG - config = CI_CONFIG.build_config[build] + config = CI.JOB_CONFIGS[build].build_config + assert config if args.rename and config.static_binary_name: path = temp_path / f"clickhouse-{config.static_binary_name}" else: diff --git a/tests/ci/env_helper.py b/tests/ci/env_helper.py index 36732bd7c9f..5217e4035da 100644 --- a/tests/ci/env_helper.py +++ b/tests/ci/env_helper.py @@ -9,8 +9,9 @@ from build_download_helper import APIException, get_gh_api module_dir = p.abspath(p.dirname(__file__)) git_root = p.abspath(p.join(module_dir, "..", "..")) + ROOT_DIR = git_root -CI = bool(os.getenv("CI")) +IS_CI = bool(os.getenv("CI")) TEMP_PATH = os.getenv("TEMP_PATH", p.abspath(p.join(module_dir, "./tmp"))) REPORT_PATH = f"{TEMP_PATH}/reports" # FIXME: latest should not be used in CI, set temporary for transition to "docker with digest as a tag" diff --git a/tests/ci/finish_check.py 
b/tests/ci/finish_check.py index 904b565ad86..bd83f875790 100644 --- a/tests/ci/finish_check.py +++ b/tests/ci/finish_check.py @@ -4,7 +4,7 @@ import logging from github import Github -from ci_config import StatusNames +from ci_config import CI from commit_status_helper import ( get_commit, get_commit_filtered_statuses, @@ -71,7 +71,7 @@ def main(): can_set_green_mergeable_status=True, ) - ci_running_statuses = [s for s in statuses if s.context == StatusNames.CI] + ci_running_statuses = [s for s in statuses if s.context == CI.StatusNames.CI] if not ci_running_statuses: return # Take the latest status @@ -81,7 +81,11 @@ def main(): has_pending = False error_cnt = 0 for status in statuses: - if status.context in (StatusNames.MERGEABLE, StatusNames.CI, StatusNames.SYNC): + if status.context in ( + CI.StatusNames.MERGEABLE, + CI.StatusNames.CI, + CI.StatusNames.SYNC, + ): # do not account these statuses continue if status.state == PENDING: @@ -108,7 +112,7 @@ def main(): ci_state, ci_status.target_url, description, - StatusNames.CI, + CI.StatusNames.CI, pr_info, dump_to_file=True, ) diff --git a/tests/ci/integration_tests_runner.py b/tests/ci/integration_tests_runner.py index 4abaeac30b7..87f721cfde7 100755 --- a/tests/ci/integration_tests_runner.py +++ b/tests/ci/integration_tests_runner.py @@ -18,7 +18,7 @@ from collections import defaultdict from itertools import chain from typing import Any, Dict -from env_helper import CI +from env_helper import IS_CI from integration_test_images import IMAGES MAX_RETRY = 1 @@ -1004,7 +1004,7 @@ def run(): logging.info("Running tests") - if CI: + if IS_CI: # Avoid overlaps with previous runs logging.info("Clearing dmesg before run") subprocess.check_call("sudo -E dmesg --clear", shell=True) @@ -1012,7 +1012,7 @@ def run(): state, description, test_results, _ = runner.run_impl(repo_path, build_path) logging.info("Tests finished") - if CI: + if IS_CI: # Dump dmesg (to capture possible OOMs) logging.info("Dumping dmesg") subprocess.check_call("sudo -E dmesg -T", shell=True) diff --git a/tests/ci/jepsen_check.py b/tests/ci/jepsen_check.py index 1e61fd9fab7..f91a3f080c0 100644 --- a/tests/ci/jepsen_check.py +++ b/tests/ci/jepsen_check.py @@ -13,7 +13,6 @@ import requests from build_download_helper import ( download_build_with_progress, - get_build_name_for_check, read_build_urls, ) from compress_files import compress_fast @@ -25,6 +24,7 @@ from report import FAILURE, SUCCESS, JobReport, TestResult, TestResults from ssh import SSHKey from stopwatch import Stopwatch from tee_popen import TeePopen +from ci_config import CI JEPSEN_GROUP_NAME = "jepsen_group" @@ -224,7 +224,7 @@ def main(): head = requests.head(build_url, timeout=60) assert head.status_code == 200, f"Clickhouse binary not found: {build_url}" else: - build_name = get_build_name_for_check(check_name) + build_name = CI.get_required_build_name(check_name) urls = read_build_urls(build_name, REPORT_PATH) build_url = None for url in urls: diff --git a/tests/ci/performance_comparison_check.py b/tests/ci/performance_comparison_check.py index 0c779b515bd..b3488ac0af2 100644 --- a/tests/ci/performance_comparison_check.py +++ b/tests/ci/performance_comparison_check.py @@ -12,7 +12,7 @@ from pathlib import Path from github import Github from build_download_helper import download_builds_filter -from ci_config import CI_CONFIG +from ci_config import CI from clickhouse_helper import get_instance_id, get_instance_type from commit_status_helper import get_commit from docker_images_helper import get_docker_image, 
pull_image @@ -83,7 +83,7 @@ def main(): assert ( check_name ), "Check name must be provided as an input arg or in CHECK_NAME env" - required_build = CI_CONFIG.test_configs[check_name].required_build + required_build = CI.JOB_CONFIGS[check_name].get_required_build() with open(GITHUB_EVENT_PATH, "r", encoding="utf-8") as event_file: event = json.load(event_file) diff --git a/tests/ci/pr_info.py b/tests/ci/pr_info.py index dda5b30f1e3..fb25a29cc57 100644 --- a/tests/ci/pr_info.py +++ b/tests/ci/pr_info.py @@ -316,7 +316,9 @@ class PRInfo: @property def is_master(self) -> bool: - return self.number == 0 and self.head_ref == "master" + return ( + self.number == 0 and self.head_ref == "master" and not self.is_merge_queue + ) @property def is_release(self) -> bool: @@ -324,7 +326,10 @@ class PRInfo: @property def is_pr(self): - return self.event_type == EventType.PULL_REQUEST + if self.event_type == EventType.PULL_REQUEST: + assert self.number + return True + return False @property def is_scheduled(self) -> bool: @@ -353,9 +358,6 @@ class PRInfo: if self.changed_files_requested: return - if not getattr(self, "diff_urls", False): - raise TypeError("The event does not have diff URLs") - for diff_url in self.diff_urls: response = get_gh_api( diff_url, diff --git a/tests/ci/report.py b/tests/ci/report.py index ee58efdba52..bdaa2e15130 100644 --- a/tests/ci/report.py +++ b/tests/ci/report.py @@ -21,7 +21,7 @@ from typing import ( ) from build_download_helper import get_gh_api -from ci_config import CI_CONFIG, BuildConfig +from ci_config import CI from ci_utils import normalize_string from env_helper import REPORT_PATH, TEMP_PATH @@ -412,6 +412,7 @@ class BuildResult: ref_report = None master_report = None any_report = None + Path(REPORT_PATH).mkdir(parents=True, exist_ok=True) for file in Path(REPORT_PATH).iterdir(): if f"{build_name}.json" in file.name: any_report = file @@ -448,8 +449,10 @@ class BuildResult: return json.dumps(asdict(self), indent=2) @property - def build_config(self) -> Optional[BuildConfig]: - return CI_CONFIG.build_config.get(self.build_name, None) + def build_config(self) -> Optional[CI.BuildConfig]: + if self.build_name not in CI.JOB_CONFIGS: + return None + return CI.JOB_CONFIGS[self.build_name].build_config @property def comment(self) -> str: diff --git a/tests/ci/run_check.py b/tests/ci/run_check.py index 131cbeef786..00942352dde 100644 --- a/tests/ci/run_check.py +++ b/tests/ci/run_check.py @@ -5,7 +5,6 @@ from typing import Tuple from github import Github -from ci_config import StatusNames from commit_status_helper import ( create_ci_report, format_description, @@ -24,6 +23,7 @@ from lambda_shared_package.lambda_shared.pr import ( ) from pr_info import PRInfo from report import FAILURE, PENDING, SUCCESS, StatusType +from ci_config import CI TRUSTED_ORG_IDS = { 54801242, # clickhouse @@ -208,7 +208,7 @@ def main(): PENDING, ci_report_url, description, - StatusNames.CI, + CI.StatusNames.CI, pr_info, ) diff --git a/tests/ci/s3_helper.py b/tests/ci/s3_helper.py index bff53f00ad3..da0ccb2b74d 100644 --- a/tests/ci/s3_helper.py +++ b/tests/ci/s3_helper.py @@ -11,7 +11,7 @@ import boto3 # type: ignore import botocore # type: ignore from compress_files import compress_file_fast from env_helper import ( - CI, + IS_CI, RUNNER_TEMP, S3_BUILDS_BUCKET, S3_DOWNLOAD, @@ -111,13 +111,13 @@ class S3Helper: self.client.delete_object(Bucket=bucket_name, Key=s3_path) def upload_test_report_to_s3(self, file_path: Path, s3_path: str) -> str: - if CI: + if IS_CI: return 
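# The tightened PRInfo properties from the pr_info.py hunk above: is_master now
# excludes merge-queue runs, and is_pr asserts a non-zero PR number. The new unit
# tests drive them with the same stub event used here:
from pr_info import PRInfo, EventType

info = PRInfo(github_event={"dummy": "dummy"})
info.event_type = EventType.PULL_REQUEST
info.number = 12345
assert info.is_pr and not info.is_master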
self._upload_file_to_s3(S3_TEST_REPORTS_BUCKET, file_path, s3_path) return S3Helper.copy_file_to_local(S3_TEST_REPORTS_BUCKET, file_path, s3_path) def upload_build_file_to_s3(self, file_path: Path, s3_path: str) -> str: - if CI: + if IS_CI: return self._upload_file_to_s3(S3_BUILDS_BUCKET, file_path, s3_path) return S3Helper.copy_file_to_local(S3_BUILDS_BUCKET, file_path, s3_path) @@ -255,7 +255,7 @@ class S3Helper: if full_fs_path.is_symlink(): if upload_symlinks: - if CI: + if IS_CI: return self._upload_file_to_s3( bucket_name, full_fs_path, @@ -266,7 +266,7 @@ class S3Helper: ) return [] - if CI: + if IS_CI: return self._upload_file_to_s3( bucket_name, full_fs_path, full_s3_path + "/" + file_path.name ) @@ -331,7 +331,7 @@ class S3Helper: return result def url_if_exists(self, key: str, bucket: str = S3_BUILDS_BUCKET) -> str: - if not CI: + if not IS_CI: local_path = self.local_path(bucket, key) if local_path.exists(): return local_path.as_uri() @@ -345,7 +345,7 @@ class S3Helper: @staticmethod def get_url(bucket: str, key: str) -> str: - if CI: + if IS_CI: return S3Helper.s3_url(bucket, key) return S3Helper.local_path(bucket, key).as_uri() diff --git a/tests/ci/sqlancer_check.py b/tests/ci/sqlancer_check.py index 9d33c480598..a68db8b9791 100644 --- a/tests/ci/sqlancer_check.py +++ b/tests/ci/sqlancer_check.py @@ -6,12 +6,13 @@ import subprocess import sys from pathlib import Path -from build_download_helper import get_build_name_for_check, read_build_urls +from build_download_helper import read_build_urls from docker_images_helper import DockerImage, get_docker_image, pull_image from env_helper import REPORT_PATH, TEMP_PATH from report import FAILURE, SUCCESS, JobReport, TestResult, TestResults from stopwatch import Stopwatch from tee_popen import TeePopen +from ci_config import CI IMAGE_NAME = "clickhouse/sqlancer-test" @@ -43,7 +44,7 @@ def main(): docker_image = pull_image(get_docker_image(IMAGE_NAME)) - build_name = get_build_name_for_check(check_name) + build_name = CI.get_required_build_name(check_name) urls = read_build_urls(build_name, reports_path) if not urls: raise ValueError("No build URLs found") diff --git a/tests/ci/sqltest.py b/tests/ci/sqltest.py index c8c2adbbd56..8e6ca6ff87f 100644 --- a/tests/ci/sqltest.py +++ b/tests/ci/sqltest.py @@ -6,12 +6,13 @@ import subprocess import sys from pathlib import Path -from build_download_helper import get_build_name_for_check, read_build_urls +from build_download_helper import read_build_urls from docker_images_helper import get_docker_image, pull_image from env_helper import REPORT_PATH, TEMP_PATH from pr_info import PRInfo from report import SUCCESS, JobReport, TestResult from stopwatch import Stopwatch +from ci_config import CI IMAGE_NAME = "clickhouse/sqltest" @@ -49,7 +50,7 @@ def main(): docker_image = pull_image(get_docker_image(IMAGE_NAME)) - build_name = get_build_name_for_check(check_name) + build_name = CI.get_required_build_name(check_name) print(build_name) urls = read_build_urls(build_name, reports_path) if not urls: diff --git a/tests/ci/style_check.py b/tests/ci/style_check.py index 9906d87a8c0..35e48246be9 100644 --- a/tests/ci/style_check.py +++ b/tests/ci/style_check.py @@ -13,7 +13,7 @@ from typing import List, Tuple, Union import magic from docker_images_helper import get_docker_image, pull_image -from env_helper import CI, REPO_COPY, TEMP_PATH +from env_helper import IS_CI, REPO_COPY, TEMP_PATH from git_helper import GIT_PREFIX, git_runner from pr_info import PRInfo from report import ERROR, FAILURE, SUCCESS, 
JobReport, TestResults, read_test_results @@ -152,7 +152,7 @@ def main(): run_cpp_check = True run_shell_check = True run_python_check = True - if CI and pr_info.number > 0: + if IS_CI and pr_info.number > 0: pr_info.fetch_changed_files() run_cpp_check = any( not (is_python(file) or is_shell(file)) for file in pr_info.changed_files diff --git a/tests/ci/sync_pr.py b/tests/ci/sync_pr.py index f1073603e8d..8251ccbaf38 100644 --- a/tests/ci/sync_pr.py +++ b/tests/ci/sync_pr.py @@ -5,12 +5,12 @@ import argparse import sys -from ci_config import StatusNames from commit_status_helper import get_commit, post_commit_status from get_robot_token import get_best_robot_token from github_helper import GitHub from pr_info import PRInfo from report import SUCCESS +from ci_config import CI def parse_args() -> argparse.Namespace: @@ -75,7 +75,7 @@ def set_sync_status(gh, pr_info, sync_pr): if sync_pr.mergeable_state == "clean": print(f"Sync PR [{sync_pr.number}] is clean") post_commit_status( - get_commit(gh, pr_info.sha), SUCCESS, "", "", StatusNames.SYNC + get_commit(gh, pr_info.sha), SUCCESS, "", "", CI.StatusNames.SYNC ) else: print( diff --git a/tests/ci/test_ci_cache.py b/tests/ci/test_ci_cache.py index b1be0709803..81d649b246b 100644 --- a/tests/ci/test_ci_cache.py +++ b/tests/ci/test_ci_cache.py @@ -5,12 +5,12 @@ from pathlib import Path import shutil from typing import Dict, Set import unittest -from ci_config import Build, JobNames from s3_helper import S3Helper from ci_cache import CiCache from digest_helper import JOB_DIGEST_LEN from commit_status_helper import CommitStatusData from env_helper import S3_BUILDS_BUCKET, TEMP_PATH +from ci_config import CI def _create_mock_digest_1(string): @@ -21,8 +21,8 @@ def _create_mock_digest_2(string): return md5((string + "+nonce").encode("utf-8")).hexdigest()[:JOB_DIGEST_LEN] -DIGESTS = {job: _create_mock_digest_1(job) for job in JobNames} -DIGESTS2 = {job: _create_mock_digest_2(job) for job in JobNames} +DIGESTS = {job: _create_mock_digest_1(job) for job in CI.JobNames} +DIGESTS2 = {job: _create_mock_digest_2(job) for job in CI.JobNames} # pylint:disable=protected-access @@ -84,8 +84,10 @@ class TestCiCache(unittest.TestCase): NUM_BATCHES = 10 DOCS_JOBS_NUM = 1 - assert len(set(job for job in JobNames)) == len(list(job for job in JobNames)) - NONDOCS_JOBS_NUM = len(set(job for job in JobNames)) - DOCS_JOBS_NUM + assert len(set(job for job in CI.JobNames)) == len( + list(job for job in CI.JobNames) + ) + NONDOCS_JOBS_NUM = len(set(job for job in CI.JobNames)) - DOCS_JOBS_NUM PR_NUM = 123456 status = CommitStatusData( @@ -97,13 +99,13 @@ class TestCiCache(unittest.TestCase): ) ### add some pending statuses for two batches, non-release branch - for job in JobNames: + for job in CI.JobNames: ci_cache.push_pending(job, [0, 1, 2], NUM_BATCHES, release_branch=False) ci_cache_2.push_pending(job, [0, 1, 2], NUM_BATCHES, release_branch=False) ### add success status for 0 batch, non-release branch batch = 0 - for job in JobNames: + for job in CI.JobNames: ci_cache.push_successful( job, batch, NUM_BATCHES, status, release_branch=False ) @@ -113,21 +115,17 @@ class TestCiCache(unittest.TestCase): ### add failed status for 2 batch, non-release branch batch = 2 - for job in JobNames: + for job in CI.JobNames: ci_cache.push_failed(job, batch, NUM_BATCHES, status, release_branch=False) ci_cache_2.push_failed( job, batch, NUM_BATCHES, status, release_branch=False ) ### check all expected directories were created on s3 mock - expected_build_path_1 = 
f"{CiCache.JobType.SRCS.value}-{_create_mock_digest_1(Build.PACKAGE_RELEASE)}" - expected_docs_path_1 = ( - f"{CiCache.JobType.DOCS.value}-{_create_mock_digest_1(JobNames.DOCS_CHECK)}" - ) - expected_build_path_2 = f"{CiCache.JobType.SRCS.value}-{_create_mock_digest_2(Build.PACKAGE_RELEASE)}" - expected_docs_path_2 = ( - f"{CiCache.JobType.DOCS.value}-{_create_mock_digest_2(JobNames.DOCS_CHECK)}" - ) + expected_build_path_1 = f"{CiCache.JobType.SRCS.value}-{_create_mock_digest_1(CI.BuildNames.PACKAGE_RELEASE)}" + expected_docs_path_1 = f"{CiCache.JobType.DOCS.value}-{_create_mock_digest_1(CI.JobNames.DOCS_CHECK)}" + expected_build_path_2 = f"{CiCache.JobType.SRCS.value}-{_create_mock_digest_2(CI.BuildNames.PACKAGE_RELEASE)}" + expected_docs_path_2 = f"{CiCache.JobType.DOCS.value}-{_create_mock_digest_2(CI.JobNames.DOCS_CHECK)}" self.assertCountEqual( list(s3_mock.files_on_s3_paths.keys()), [ @@ -174,7 +172,7 @@ class TestCiCache(unittest.TestCase): ) ### check statuses for all jobs in cache - for job in JobNames: + for job in CI.JobNames: self.assertEqual( ci_cache.is_successful(job, 0, NUM_BATCHES, release_branch=False), True ) @@ -212,7 +210,7 @@ class TestCiCache(unittest.TestCase): assert status2 is None ### add some more pending statuses for two batches and for a release branch - for job in JobNames: + for job in CI.JobNames: ci_cache.push_pending( job, batches=[0, 1], num_batches=NUM_BATCHES, release_branch=True ) @@ -226,7 +224,7 @@ class TestCiCache(unittest.TestCase): sha="deadbeaf2", pr_num=PR_NUM, ) - for job in JobNames: + for job in CI.JobNames: ci_cache.push_successful(job, 0, NUM_BATCHES, status, release_branch=True) ### check number of cache files is as expected @@ -249,7 +247,7 @@ class TestCiCache(unittest.TestCase): ) ### check statuses - for job in JobNames: + for job in CI.JobNames: self.assertEqual(ci_cache.is_successful(job, 0, NUM_BATCHES, False), True) self.assertEqual(ci_cache.is_successful(job, 0, NUM_BATCHES, True), True) self.assertEqual(ci_cache.is_successful(job, 1, NUM_BATCHES, False), False) @@ -273,7 +271,7 @@ class TestCiCache(unittest.TestCase): ### create new cache object and verify the same checks ci_cache = CiCache(s3_mock, DIGESTS) - for job in JobNames: + for job in CI.JobNames: self.assertEqual(ci_cache.is_successful(job, 0, NUM_BATCHES, False), True) self.assertEqual(ci_cache.is_successful(job, 0, NUM_BATCHES, True), True) self.assertEqual(ci_cache.is_successful(job, 1, NUM_BATCHES, False), False) diff --git a/tests/ci/test_ci_config.py b/tests/ci/test_ci_config.py index badbc4c5dcf..7a51a65b5d5 100644 --- a/tests/ci/test_ci_config.py +++ b/tests/ci/test_ci_config.py @@ -1,30 +1,460 @@ #!/usr/bin/env python3 import unittest -from ci_config import CIStages, JobNames, CI_CONFIG, Runners +from ci_config import CI +import ci as CIPY +from ci_settings import CiSettings +from pr_info import PRInfo, EventType +from s3_helper import S3Helper +from ci_cache import CiCache +from ci_utils import normalize_string + + +_TEST_EVENT_JSON = {"dummy": "dummy"} + +# pylint:disable=protected-access,union-attr class TestCIConfig(unittest.TestCase): def test_runner_config(self): """check runner is provided w/o exception""" - for job in JobNames: - runner = CI_CONFIG.get_runner_type(job) - self.assertIn(runner, Runners) + for job in CI.JobNames: + self.assertIn(CI.JOB_CONFIGS[job].runner_type, CI.Runners) + if ( + job + in ( + CI.JobNames.STYLE_CHECK, + CI.JobNames.BUILD_CHECK, + ) + or "jepsen" in job.lower() + ): + self.assertTrue( + "style" in 
CI.JOB_CONFIGS[job].runner_type, + f"Job [{job}] must have style-checker(-aarch64) runner", + ) + elif "binary_" in job.lower() or "package_" in job.lower(): + self.assertTrue( + CI.JOB_CONFIGS[job].runner_type == CI.Runners.BUILDER, + f"Job [{job}] must have [{CI.Runners.BUILDER}] runner", + ) + elif "aarch64" in job.lower(): + self.assertTrue( + "aarch" in CI.JOB_CONFIGS[job].runner_type, + f"Job [{job}] does not match runner [{CI.JOB_CONFIGS[job].runner_type}]", + ) + else: + self.assertTrue( + "aarch" not in CI.JOB_CONFIGS[job].runner_type, + f"Job [{job}] does not match runner [{CI.JOB_CONFIGS[job].runner_type}]", + ) + + def test_common_configs_applied_properly(self): + for job in CI.JobNames: + if CI.JOB_CONFIGS[job].job_name_keyword: + self.assertTrue( + CI.JOB_CONFIGS[job].job_name_keyword.lower() + in normalize_string(job), + f"Job [{job}] apparently uses wrong common config with job keyword [{CI.JOB_CONFIGS[job].job_name_keyword}]", + ) + + def test_required_checks(self): + for job in CI.REQUIRED_CHECKS: + if job in (CI.StatusNames.PR_CHECK, CI.StatusNames.SYNC): + continue + self.assertTrue(job in CI.JOB_CONFIGS, f"Job [{job}] not in job config") + + def test_builds_configs(self): + """build name in the build config must match the job name""" + for job in CI.JobNames: + self.assertTrue(job in CI.JOB_CONFIGS) + self.assertTrue(CI.JOB_CONFIGS[job].runner_type in CI.Runners) + if job in CI.BuildNames: + self.assertTrue(CI.JOB_CONFIGS[job].build_config.name == job) + self.assertTrue(CI.JOB_CONFIGS[job].required_builds is None) + else: + self.assertTrue(CI.JOB_CONFIGS[job].build_config is None) + if "asan" in job: + self.assertTrue( + CI.JOB_CONFIGS[job].required_builds[0] + == CI.BuildNames.PACKAGE_ASAN, + f"Job [{job}] probably has wrong required build [{CI.JOB_CONFIGS[job].required_builds[0]}] in JobConfig", + ) + elif "msan" in job: + self.assertTrue( + CI.JOB_CONFIGS[job].required_builds[0] + == CI.BuildNames.PACKAGE_MSAN, + f"Job [{job}] probably has wrong required build [{CI.JOB_CONFIGS[job].required_builds[0]}] in JobConfig", + ) + elif "tsan" in job: + self.assertTrue( + CI.JOB_CONFIGS[job].required_builds[0] + == CI.BuildNames.PACKAGE_TSAN, + f"Job [{job}] probably has wrong required build [{CI.JOB_CONFIGS[job].required_builds[0]}] in JobConfig", + ) + elif "ubsan" in job: + self.assertTrue( + CI.JOB_CONFIGS[job].required_builds[0] + == CI.BuildNames.PACKAGE_UBSAN, + f"Job [{job}] probably has wrong required build [{CI.JOB_CONFIGS[job].required_builds[0]}] in JobConfig", + ) + elif "debug" in job: + self.assertTrue( + CI.JOB_CONFIGS[job].required_builds[0] + == CI.BuildNames.PACKAGE_DEBUG, + f"Job [{job}] probably has wrong required build [{CI.JOB_CONFIGS[job].required_builds[0]}] in JobConfig", + ) + elif "release" in job: + self.assertTrue( + CI.JOB_CONFIGS[job].required_builds[0] + in ( + CI.BuildNames.PACKAGE_RELEASE, + CI.BuildNames.BINARY_RELEASE, + ), + f"Job [{job}] probably has wrong required build [{CI.JOB_CONFIGS[job].required_builds[0]}] in JobConfig", + ) + elif "coverage" in job: + self.assertTrue( + CI.JOB_CONFIGS[job].required_builds[0] + == CI.BuildNames.PACKAGE_RELEASE_COVERAGE, + f"Job [{job}] probably has wrong required build [{CI.JOB_CONFIGS[job].required_builds[0]}] in JobConfig", + ) + elif "aarch" in job: + self.assertTrue( + CI.JOB_CONFIGS[job].required_builds[0] + == CI.BuildNames.PACKAGE_AARCH64, + f"Job [{job}] probably has wrong required build [{CI.JOB_CONFIGS[job].required_builds[0]}] in JobConfig", + ) + elif "amd64" in job: + 
self.assertTrue( + CI.JOB_CONFIGS[job].required_builds[0] + == CI.BuildNames.PACKAGE_RELEASE, + f"Job [{job}] probably has wrong required build [{CI.JOB_CONFIGS[job].required_builds[0]}] in JobConfig", + ) + elif "uzzer" in job: + self.assertTrue( + CI.JOB_CONFIGS[job].required_builds[0] == CI.BuildNames.FUZZERS, + f"Job [{job}] probably has wrong required build [{CI.JOB_CONFIGS[job].required_builds[0]}] in JobConfig", + ) + elif "Docker" in job: + self.assertTrue( + CI.JOB_CONFIGS[job].required_builds[0] + in ( + CI.BuildNames.PACKAGE_RELEASE, + CI.BuildNames.PACKAGE_AARCH64, + ), + f"Job [{job}] probably has wrong required build [{CI.JOB_CONFIGS[job].required_builds[0]}] in JobConfig", + ) + elif "SQLTest" in job: + self.assertTrue( + CI.JOB_CONFIGS[job].required_builds[0] + == CI.BuildNames.PACKAGE_RELEASE, + f"Job [{job}] probably has wrong required build [{CI.JOB_CONFIGS[job].required_builds[0]}] in JobConfig", + ) + elif "Jepsen" in job: + self.assertTrue( + CI.JOB_CONFIGS[job].required_builds[0] + in ( + CI.BuildNames.PACKAGE_RELEASE, + CI.BuildNames.BINARY_RELEASE, + ), + f"Job [{job}] probably has wrong required build [{CI.JOB_CONFIGS[job].required_builds[0]}] in JobConfig", + ) + elif job in ( + CI.JobNames.STYLE_CHECK, + CI.JobNames.FAST_TEST, + CI.JobNames.BUILD_CHECK, + CI.JobNames.DOCS_CHECK, + CI.JobNames.BUGFIX_VALIDATE, + ): + self.assertTrue(CI.JOB_CONFIGS[job].required_builds is None) + else: + print(f"Job [{job}] required build not checked") def test_job_stage_config(self): - """check runner is provided w/o exception""" - for job in JobNames: - stage = CI_CONFIG.get_job_ci_stage(job) - if job in [ - JobNames.STYLE_CHECK, - JobNames.FAST_TEST, - JobNames.JEPSEN_KEEPER, - JobNames.BUILD_CHECK, - JobNames.BUILD_CHECK_SPECIAL, - ]: - assert ( - stage == CIStages.NA - ), "These jobs are not in CI stages, must be NA" + """ + check runner is provided w/o exception + """ + # check stages + for job in CI.JobNames: + if job in CI.BuildNames: + self.assertTrue( + CI.get_job_ci_stage(job) + in (CI.WorkflowStages.BUILDS_1, CI.WorkflowStages.BUILDS_2) + ) else: - assert stage != CIStages.NA, f"stage not found for [{job}]" - self.assertIn(stage, CIStages) + if job in ( + CI.JobNames.STYLE_CHECK, + CI.JobNames.FAST_TEST, + CI.JobNames.JEPSEN_SERVER, + CI.JobNames.JEPSEN_KEEPER, + CI.JobNames.BUILD_CHECK, + ): + self.assertEqual( + CI.get_job_ci_stage(job), + CI.WorkflowStages.NA, + msg=f"Stage for [{job}] is not correct", + ) + else: + self.assertTrue( + CI.get_job_ci_stage(job) + in (CI.WorkflowStages.TESTS_1, CI.WorkflowStages.TESTS_3), + msg=f"Stage for [{job}] is not correct", + ) + + def test_build_jobs_configs(self): + """ + check build jobs have non-None build_config attribute + check test jobs have None build_config attribute + """ + for job in CI.JobNames: + if job in CI.BuildNames: + self.assertTrue( + isinstance(CI.JOB_CONFIGS[job].build_config, CI.BuildConfig) + ) + else: + self.assertTrue(CI.JOB_CONFIGS[job].build_config is None) + + def test_ci_py_for_pull_request(self): + """ + checks ci.py job configuration + """ + settings = CiSettings() + settings.no_ci_cache = True + settings.ci_sets = [CI.Tags.CI_SET_BUILDS] + settings.include_keywords = [ + "package", + "integration", + "upgrade", + "clickHouse_build_check", + "stateless", + ] + settings.exclude_keywords = ["asan", "aarch64"] + pr_info = PRInfo(github_event=_TEST_EVENT_JSON) + # make it pull request info + pr_info.event_type = EventType.PULL_REQUEST + pr_info.number = 12345 + assert pr_info.is_pr and not 
pr_info.is_release and not pr_info.is_master + assert not pr_info.is_merge_queue + ci_cache = CIPY._configure_jobs( + S3Helper(), pr_info, settings, skip_jobs=False, dry_run=True + ) + actual_jobs_to_do = list(ci_cache.jobs_to_do) + expected_jobs_to_do = [] + for set_ in settings.ci_sets: + tag_config = CI.get_tag_config(set_) + assert tag_config + set_jobs = tag_config.run_jobs + for job in set_jobs: + if any(k in normalize_string(job) for k in settings.exclude_keywords): + continue + expected_jobs_to_do.append(job) + for job, config in CI.JOB_CONFIGS.items(): + if not any( + keyword in normalize_string(job) + for keyword in settings.include_keywords + ): + continue + if any( + keyword in normalize_string(job) + for keyword in settings.exclude_keywords + ): + continue + if config.random_bucket: + continue + if job not in expected_jobs_to_do: + expected_jobs_to_do.append(job) + + random_buckets = [] + for job, config in ci_cache.jobs_to_do.items(): + if config.random_bucket: + self.assertTrue( + config.random_bucket not in random_buckets, + "Only one job must be picked up from each random bucket", + ) + random_buckets.append(config.random_bucket) + actual_jobs_to_do.remove(job) + + self.assertCountEqual(expected_jobs_to_do, actual_jobs_to_do) + + def test_ci_py_for_pull_request_no_settings(self): + """ + checks ci.py job configuration in PR with empty ci_settings + """ + settings = CiSettings() + settings.no_ci_cache = True + pr_info = PRInfo(github_event=_TEST_EVENT_JSON) + # make it pull request info + pr_info.event_type = EventType.PULL_REQUEST + pr_info.number = 12345 + assert pr_info.is_pr and not pr_info.is_release and not pr_info.is_master + assert not pr_info.is_merge_queue + ci_cache = CIPY._configure_jobs( + S3Helper(), pr_info, settings, skip_jobs=False, dry_run=True + ) + actual_jobs_to_do = list(ci_cache.jobs_to_do) + expected_jobs_to_do = [] + for job, config in CI.JOB_CONFIGS.items(): + if config.random_bucket: + continue + if config.release_only: + continue + if config.run_by_label: + continue + expected_jobs_to_do.append(job) + + random_buckets = [] + for job, config in ci_cache.jobs_to_do.items(): + if config.random_bucket: + self.assertTrue( + config.random_bucket not in random_buckets, + "Only one job must be picked up from each random bucket", + ) + random_buckets.append(config.random_bucket) + actual_jobs_to_do.remove(job) + + self.assertCountEqual(expected_jobs_to_do, actual_jobs_to_do) + + def test_ci_py_for_master(self): + """ + checks ci.py job configuration + """ + settings = CiSettings() + settings.no_ci_cache = True + pr_info = PRInfo(github_event=_TEST_EVENT_JSON) + pr_info.event_type = EventType.PUSH + assert pr_info.number == 0 and pr_info.is_release and not pr_info.is_merge_queue + ci_cache = CIPY._configure_jobs( + S3Helper(), pr_info, settings, skip_jobs=False, dry_run=True + ) + actual_jobs_to_do = list(ci_cache.jobs_to_do) + expected_jobs_to_do = [] + for job, config in CI.JOB_CONFIGS.items(): + if config.pr_only: + continue + if config.run_by_label: + continue + if job in CI.MQ_JOBS: + continue + expected_jobs_to_do.append(job) + self.assertCountEqual(expected_jobs_to_do, actual_jobs_to_do) + + def test_ci_py_for_merge_queue(self): + """ + checks ci.py job configuration + """ + settings = CiSettings() + settings.no_ci_cache = True + pr_info = PRInfo(github_event=_TEST_EVENT_JSON) + # make it merge_queue + pr_info.event_type = EventType.MERGE_QUEUE + assert ( + pr_info.number == 0 + and pr_info.is_merge_queue + and not pr_info.is_release + and not 
pr_info.is_master + and not pr_info.is_pr + ) + ci_cache = CIPY._configure_jobs( + S3Helper(), pr_info, settings, skip_jobs=False, dry_run=True + ) + actual_jobs_to_do = list(ci_cache.jobs_to_do) + expected_jobs_to_do = [ + "Style check", + "Fast test", + "binary_release", + "Unit tests (release)", + ] + self.assertCountEqual(expected_jobs_to_do, actual_jobs_to_do) + + def test_ci_py_await(self): + """ + checks ci.py await logic for pending jobs + """ + settings = CiSettings() + settings.no_ci_cache = True + pr_info = PRInfo(github_event=_TEST_EVENT_JSON) + pr_info.event_type = EventType.PUSH + pr_info.number = 0 + assert pr_info.is_release and not pr_info.is_merge_queue + ci_cache = CIPY._configure_jobs( + S3Helper(), pr_info, settings, skip_jobs=False, dry_run=True + ) + self.assertTrue(not ci_cache.jobs_to_skip, "Must be no jobs in skip list") + all_jobs_in_wf = list(ci_cache.jobs_to_do) + assert not ci_cache.jobs_to_wait + ci_cache.await_pending_jobs(is_release=pr_info.is_release, dry_run=True) + assert not ci_cache.jobs_to_skip + assert not ci_cache.jobs_to_wait + + # pretend there are pending jobs that we need to wait for + ci_cache.jobs_to_wait = dict(ci_cache.jobs_to_do) + for job, config in ci_cache.jobs_to_wait.items(): + assert not config.pending_batches + assert config.batches + config.pending_batches = list(config.batches) + for job, config in ci_cache.jobs_to_wait.items(): + for batch in range(config.num_batches): + record = CiCache.Record( + record_type=CiCache.RecordType.PENDING, + job_name=job, + job_digest=ci_cache.job_digests[job], + batch=batch, + num_batches=config.num_batches, + release_branch=True, + ) + for record_t_, records_ in ci_cache.records.items(): + if record_t_.value == CiCache.RecordType.PENDING.value: + records_[record.to_str_key()] = record + + def _test_await_for_batch( + ci_cache: CiCache, record_type: CiCache.RecordType, batch: int + ) -> None: + assert ci_cache.jobs_to_wait + for job_, config_ in ci_cache.jobs_to_wait.items(): + record = CiCache.Record( + record_type=record_type, + job_name=job_, + job_digest=ci_cache.job_digests[job_], + batch=batch, + num_batches=config_.num_batches, + release_branch=True, + ) + for record_t_, records_ in ci_cache.records.items(): + if record_t_.value == record_type.value: + records_[record.to_str_key()] = record + # await + ci_cache.await_pending_jobs(is_release=pr_info.is_release, dry_run=True) + for _, config_ in ci_cache.jobs_to_wait.items(): + assert config_.pending_batches + if ( + record_type != CiCache.RecordType.PENDING + and batch < config_.num_batches + ): + assert batch not in config_.pending_batches + else: + assert batch in config_.pending_batches + + for _, config_ in ci_cache.jobs_to_do.items(): + # jobs to do must have batches to run before/after await + # if it's an empty list after await, the job has apparently not been removed after await + assert config_.batches + + _test_await_for_batch(ci_cache, CiCache.RecordType.SUCCESSFUL, 0) + # check all one-batch jobs are in jobs_to_skip + for job in all_jobs_in_wf: + config = CI.JOB_CONFIGS[job] + if config.num_batches == 1: + self.assertTrue(job in ci_cache.jobs_to_skip) + self.assertTrue(job not in ci_cache.jobs_to_do) + else: + self.assertTrue(job not in ci_cache.jobs_to_skip) + self.assertTrue(job in ci_cache.jobs_to_do) + + _test_await_for_batch(ci_cache, CiCache.RecordType.FAILED, 1) + _test_await_for_batch(ci_cache, CiCache.RecordType.SUCCESSFUL, 2) + + self.assertTrue(len(ci_cache.jobs_to_skip) > 0) + self.assertTrue(len(ci_cache.jobs_to_do) > 0) +
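# The assertCountEqual below pins the closing invariant: awaiting only moves
# finished jobs from jobs_to_do into jobs_to_skip, it never drops one, so the
# union of the two must still equal the workflow's original job set.
# Illustrative (hypothetical) values:
#
#     before await: jobs_to_do = {A, B, C}   jobs_to_skip = []
#     after await:  jobs_to_do = {B}         jobs_to_skip = [A, C]
#     invariant:    set(jobs_to_do) | set(jobs_to_skip) == set(all_jobs_in_wf)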
self.assertCountEqual( + list(ci_cache.jobs_to_do) + ci_cache.jobs_to_skip, all_jobs_in_wf + ) diff --git a/tests/ci/test_ci_options.py b/tests/ci/test_ci_options.py index 60888932803..fc21c7dda4e 100644 --- a/tests/ci/test_ci_options.py +++ b/tests/ci/test_ci_options.py @@ -4,7 +4,7 @@ import unittest from ci_settings import CiSettings -from ci_config import JobConfig +from ci_config import CI _TEST_BODY_1 = """ #### Run only: @@ -64,8 +64,8 @@ _TEST_JOB_LIST = [ "fuzzers", "Docker server image", "Docker keeper image", - "Install packages (amd64)", - "Install packages (arm64)", + "Install packages (release)", + "Install packages (aarch64)", "Stateless tests (debug)", "Stateless tests (release)", "Stateless tests (coverage)", @@ -120,15 +120,15 @@ _TEST_JOB_LIST = [ "AST fuzzer (ubsan)", "ClickHouse Keeper Jepsen", "ClickHouse Server Jepsen", - "Performance Comparison", - "Performance Comparison Aarch64", + "Performance Comparison (release)", + "Performance Comparison (aarch64)", "Sqllogic test (release)", "SQLancer (release)", "SQLancer (debug)", "SQLTest", - "Compatibility check (amd64)", + "Compatibility check (release)", "Compatibility check (aarch64)", - "ClickBench (amd64)", + "ClickBench (release)", "ClickBench (aarch64)", "libFuzzer tests", "ClickHouse build check", @@ -166,7 +166,10 @@ class TestCIOptions(unittest.TestCase): ["tsan", "foobar", "aarch64", "analyzer", "s3_storage", "coverage"], ) - jobs_configs = {job: JobConfig() for job in _TEST_JOB_LIST} + jobs_configs = { + job: CI.JobConfig(runner_type=CI.Runners.STYLE_CHECKER) + for job in _TEST_JOB_LIST + } jobs_configs[ "fuzzers" ].run_by_label = ( @@ -210,7 +213,10 @@ class TestCIOptions(unittest.TestCase): ) def test_options_applied_2(self): - jobs_configs = {job: JobConfig() for job in _TEST_JOB_LIST_2} + jobs_configs = { + job: CI.JobConfig(runner_type=CI.Runners.STYLE_CHECKER) + for job in _TEST_JOB_LIST_2 + } jobs_configs["Style check"].release_only = True jobs_configs["Fast test"].pr_only = True jobs_configs["fuzzers"].run_by_label = "TEST_LABEL" @@ -252,7 +258,10 @@ class TestCIOptions(unittest.TestCase): def test_options_applied_3(self): ci_settings = CiSettings() ci_settings.include_keywords = ["Style"] - jobs_configs = {job: JobConfig() for job in _TEST_JOB_LIST_2} + jobs_configs = { + job: CI.JobConfig(runner_type=CI.Runners.STYLE_CHECKER) + for job in _TEST_JOB_LIST_2 + } jobs_configs["Style check"].release_only = True jobs_configs["Fast test"].pr_only = True # no settings are set @@ -296,7 +305,10 @@ class TestCIOptions(unittest.TestCase): ) self.assertCountEqual(ci_options.include_keywords, ["analyzer"]) self.assertIsNone(ci_options.exclude_keywords) - jobs_configs = {job: JobConfig() for job in _TEST_JOB_LIST} + jobs_configs = { + job: CI.JobConfig(runner_type=CI.Runners.STYLE_CHECKER) + for job in _TEST_JOB_LIST + } jobs_configs[ "fuzzers" ].run_by_label = "TEST_LABEL" # check "fuzzers" does not appears in the result diff --git a/tests/integration/test_asynchronous_metric_jemalloc_profile_active/__init__.py b/tests/integration/test_asynchronous_metric_jemalloc_profile_active/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/integration/test_asynchronous_metric_jemalloc_profile_active/configs/asynchronous_metrics_update_period_s.xml b/tests/integration/test_asynchronous_metric_jemalloc_profile_active/configs/asynchronous_metrics_update_period_s.xml new file mode 100644 index 00000000000..47e88730482 --- /dev/null +++ 
b/tests/integration/test_asynchronous_metric_jemalloc_profile_active/configs/asynchronous_metrics_update_period_s.xml @@ -0,0 +1,3 @@ +<clickhouse> + <asynchronous_metrics_update_period_s>1</asynchronous_metrics_update_period_s> +</clickhouse> diff --git a/tests/integration/test_asynchronous_metric_jemalloc_profile_active/test.py b/tests/integration/test_asynchronous_metric_jemalloc_profile_active/test.py new file mode 100644 index 00000000000..a8f4ab05888 --- /dev/null +++ b/tests/integration/test_asynchronous_metric_jemalloc_profile_active/test.py @@ -0,0 +1,73 @@ +import time + +import pytest +from helpers.cluster import ClickHouseCluster + +cluster = ClickHouseCluster(__file__) +node1 = cluster.add_instance( + "node1", + main_configs=["configs/asynchronous_metrics_update_period_s.xml"], + env_variables={"MALLOC_CONF": "background_thread:true,prof:true"}, +) + + +@pytest.fixture(scope="module") +def started_cluster(): + try: + cluster.start() + yield cluster + + finally: + cluster.shutdown() + + +# asynchronous metrics are updated once every 60s by default. To make the test run faster, the setting +# asynchronous_metrics_update_period_s is set to 1s so that the metrics are populated faster and +# are available for querying during the test. +def test_asynchronous_metric_jemalloc_profile_active(started_cluster): + # profiling is active by default + if node1.is_built_with_sanitizer(): + pytest.skip("Disabled for sanitizers") + + res_o = node1.query( + "SELECT * FROM system.asynchronous_metrics WHERE metric ILIKE '%jemalloc.prof.active%' FORMAT Vertical;" + ) + assert ( + res_o + == """Row 1: +────── +metric: jemalloc.prof.active +value: 1 +description: An internal metric of the low-level memory allocator (jemalloc). See https://jemalloc.net/jemalloc.3.html +""" + ) + # disable + node1.query("SYSTEM JEMALLOC DISABLE PROFILE") + time.sleep(5) + res_t = node1.query( + "SELECT * FROM system.asynchronous_metrics WHERE metric ILIKE '%jemalloc.prof.active%' FORMAT Vertical;" + ) + assert ( + res_t + == """Row 1: +────── +metric: jemalloc.prof.active +value: 0 +description: An internal metric of the low-level memory allocator (jemalloc). See https://jemalloc.net/jemalloc.3.html +""" + ) + # enable + node1.query("SYSTEM JEMALLOC ENABLE PROFILE") + time.sleep(5) + res_f = node1.query( + "SELECT * FROM system.asynchronous_metrics WHERE metric ILIKE '%jemalloc.prof.active%' FORMAT Vertical;" + ) + assert ( + res_f + == """Row 1: +────── +metric: jemalloc.prof.active +value: 1 +description: An internal metric of the low-level memory allocator (jemalloc). See https://jemalloc.net/jemalloc.3.html +""" + ) diff --git a/tests/integration/test_checking_s3_blobs_paranoid/test.py b/tests/integration/test_checking_s3_blobs_paranoid/test.py index 476f7c61b28..b3b8bf86800 100644 --- a/tests/integration/test_checking_s3_blobs_paranoid/test.py +++ b/tests/integration/test_checking_s3_blobs_paranoid/test.py @@ -512,6 +512,7 @@ def test_when_s3_connection_reset_by_peer_at_create_mpu_retried( ), error +@pytest.mark.skip(reason="test is flaky, waiting ClickHouse/issues/64451") def test_query_is_canceled_with_inf_retries(cluster, broken_s3): node = cluster.instances["node_with_inf_s3_retries"] diff --git a/tests/integration/test_http_handlers_config/test.py b/tests/integration/test_http_handlers_config/test.py index f6ac42a2db2..b2efbf4bb65 100644 --- a/tests/integration/test_http_handlers_config/test.py +++ b/tests/integration/test_http_handlers_config/test.py @@ -88,6 +88,11 @@ def test_dynamic_query_handler(): "application/whatever; charset=cp1337" == res_custom_ct.headers["content-type"] ) + assert "it works" == res_custom_ct.headers["X-Test-Http-Response-Headers-Works"] + assert ( + "also works" + == res_custom_ct.headers["X-Test-Http-Response-Headers-Even-Multiple"] + ) def test_predefined_query_handler(): @@ -146,6 +151,10 @@ def test_predefined_query_handler(): ) assert b"max_final_threads\t1\nmax_threads\t1\n" == res2.content assert "application/generic+one" == res2.headers["content-type"] + assert "it works" == res2.headers["X-Test-Http-Response-Headers-Works"] + assert ( + "also works" == res2.headers["X-Test-Http-Response-Headers-Even-Multiple"] + ) cluster.instance.query( "CREATE TABLE test_table (id UInt32, data String) Engine=TinyLog" ) @@ -212,6 +221,18 @@ def test_fixed_static_handler(): "test_get_fixed_static_handler", method="GET", headers={"XXX": "xxx"} ).content ) + assert ( + "it works" + == cluster.instance.http_request( + "test_get_fixed_static_handler", method="GET", headers={"XXX": "xxx"} + ).headers["X-Test-Http-Response-Headers-Works"] + ) + assert ( + "also works" + == cluster.instance.http_request( + "test_get_fixed_static_handler", method="GET", headers={"XXX": "xxx"} + ).headers["X-Test-Http-Response-Headers-Even-Multiple"] + ) def test_config_static_handler(): diff --git a/tests/integration/test_http_handlers_config/test_dynamic_handler/config.xml b/tests/integration/test_http_handlers_config/test_dynamic_handler/config.xml index c9b61c21507..58fedbd9078 100644 --- a/tests/integration/test_http_handlers_config/test_dynamic_handler/config.xml +++ b/tests/integration/test_http_handlers_config/test_dynamic_handler/config.xml @@ -18,6 +18,10 @@ <type>dynamic_query_handler</type> <query_param_name>get_dynamic_handler_query</query_param_name> <content_type>application/whatever; charset=cp1337</content_type> +<http_response_headers> + <X-Test-Http-Response-Headers-Works>it works</X-Test-Http-Response-Headers-Works> + <X-Test-Http-Response-Headers-Even-Multiple>also works</X-Test-Http-Response-Headers-Even-Multiple> +</http_response_headers> diff --git a/tests/integration/test_http_handlers_config/test_predefined_handler/config.xml b/tests/integration/test_http_handlers_config/test_predefined_handler/config.xml index 1b8ddfab323..a7804721f12 100644 --- a/tests/integration/test_http_handlers_config/test_predefined_handler/config.xml +++ b/tests/integration/test_http_handlers_config/test_predefined_handler/config.xml @@ -19,6 +19,10 @@ <type>predefined_query_handler</type> <query>SELECT name, value FROM system.settings WHERE name = {setting_name_1:String} OR name = {setting_name_2:String}</query> <content_type>application/generic+one</content_type> +<http_response_headers> + <X-Test-Http-Response-Headers-Works>it works</X-Test-Http-Response-Headers-Works> + <X-Test-Http-Response-Headers-Even-Multiple>also works</X-Test-Http-Response-Headers-Even-Multiple> +</http_response_headers>
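For context, the <http_response_headers> blocks added to these handler configs attach custom headers to every response served by the handler, and the test.py changes above assert exactly those headers. A minimal standalone sketch of the same check, assuming a server with the dynamic handler above on localhost:8123 (the URL path is hypothetical, since the handlers' <url> rules are not shown in this diff):

import urllib.request

# Hypothetical route; substitute the path configured in the handler's <url> rule.
req = urllib.request.Request(
    "http://localhost:8123/test_dynamic_handler_get?get_dynamic_handler_query=SELECT%201",
    headers={"XXX": "xxx"},
)
with urllib.request.urlopen(req) as resp:
    assert resp.headers["X-Test-Http-Response-Headers-Works"] == "it works"
    assert resp.headers["X-Test-Http-Response-Headers-Even-Multiple"] == "also works"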
diff --git a/tests/integration/test_http_handlers_config/test_static_handler/config.xml b/tests/integration/test_http_handlers_config/test_static_handler/config.xml index ff24e6dec96..76c1f588853 100644 --- a/tests/integration/test_http_handlers_config/test_static_handler/config.xml +++ b/tests/integration/test_http_handlers_config/test_static_handler/config.xml @@ -12,6 +12,10 @@ <status>402</status> <content_type>text/html; charset=UTF-8</content_type> <response_content>Test get static handler and fix content</response_content> +<http_response_headers> + <X-Test-Http-Response-Headers-Works>it works</X-Test-Http-Response-Headers-Works> + <X-Test-Http-Response-Headers-Even-Multiple>also works</X-Test-Http-Response-Headers-Even-Multiple> +</http_response_headers> diff --git a/tests/queries/0_stateless/00098_k_union_all.sql b/tests/queries/0_stateless/00098_k_union_all.sql index 059d27075d7..280ad5ca2e6 100644 --- a/tests/queries/0_stateless/00098_k_union_all.sql +++ b/tests/queries/0_stateless/00098_k_union_all.sql @@ -1,4 +1,5 @@ SET output_format_pretty_color=1; +SET output_format_pretty_display_footer_column_names=0; SELECT 1 FORMAT PrettySpace; SELECT 1 UNION ALL SELECT 1 FORMAT PrettySpace; SELECT 1 UNION ALL SELECT 1 UNION ALL SELECT 1 FORMAT PrettySpace; diff --git a/tests/queries/0_stateless/00405_output_format_pretty_color.sql b/tests/queries/0_stateless/00405_output_format_pretty_color.sql index bc2d0c3adbf..de83567dd5d 100644 --- a/tests/queries/0_stateless/00405_output_format_pretty_color.sql +++ b/tests/queries/0_stateless/00405_output_format_pretty_color.sql @@ -1,3 +1,4 @@ +SET output_format_pretty_display_footer_column_names=0; SET output_format_pretty_color = 0; SHOW SETTING output_format_pretty_color; diff --git a/tests/queries/0_stateless/00405_pretty_formats.sql b/tests/queries/0_stateless/00405_pretty_formats.sql index 00bb09a1c30..4715f4fc110 100644 --- a/tests/queries/0_stateless/00405_pretty_formats.sql +++ b/tests/queries/0_stateless/00405_pretty_formats.sql @@ -1,3 +1,4 @@ +SET output_format_pretty_display_footer_column_names=0; SET output_format_pretty_color = 1; SELECT number AS hello, toString(number) AS world, (hello, world) AS tuple, nullIf(hello % 3, 0) AS sometimes_nulls FROM system.numbers LIMIT 10 SETTINGS max_block_size = 5 FORMAT Pretty; diff --git a/tests/queries/0_stateless/00476_pretty_formats_and_widths.sql b/tests/queries/0_stateless/00476_pretty_formats_and_widths.sql index 4bace207fb5..59809841726 100644 --- a/tests/queries/0_stateless/00476_pretty_formats_and_widths.sql +++ b/tests/queries/0_stateless/00476_pretty_formats_and_widths.sql @@ -1,4 +1,4 @@ -SET output_format_pretty_color=1, output_format_pretty_highlight_digit_groups=0; +SET output_format_pretty_color=1, output_format_pretty_highlight_digit_groups=0, output_format_pretty_display_footer_column_names=0; SELECT toUInt64(round(exp10(number))) AS x, toString(x) AS s FROM system.numbers LIMIT 10 FORMAT Pretty; SELECT toUInt64(round(exp10(number))) AS x, toString(x) AS s FROM system.numbers LIMIT 10 FORMAT PrettyCompact; SELECT toUInt64(round(exp10(number))) AS x, toString(x) AS s FROM system.numbers LIMIT 10 FORMAT PrettySpace; diff --git a/tests/queries/0_stateless/00569_parse_date_time_best_effort.sql b/tests/queries/0_stateless/00569_parse_date_time_best_effort.sql index 511addb4e4d..ca423c1922a 100644 --- a/tests/queries/0_stateless/00569_parse_date_time_best_effort.sql +++ b/tests/queries/0_stateless/00569_parse_date_time_best_effort.sql @@ -1,3 +1,4 @@ +SET output_format_pretty_display_footer_column_names=0; SELECT s, parseDateTimeBestEffortOrNull(s, 'UTC') AS a,
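The SET output_format_pretty_display_footer_column_names=0 lines added across these stateless tests pin the new footer feature off so the existing .reference files stay byte-identical; the feature itself, repeating the column names under sufficiently long Pretty output, is captured by the new 03169 reference file further down. A quick way to see it, sketched with subprocess and a local clickhouse binary (binary name and the row threshold are assumptions, not verified here):

import subprocess

# Run a 100-row Pretty query with the footer enabled.
out = subprocess.run(
    [
        "clickhouse", "local", "-q",
        "SELECT number, toTypeName(number) FROM numbers(100) FORMAT Pretty "
        "SETTINGS output_format_pretty_display_footer_column_names=1",
    ],
    capture_output=True, text=True, check=True,
).stdout
# Header plus footer: each column name should appear at least twice.
assert out.count("toTypeName(number)") >= 2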
diff --git a/tests/queries/0_stateless/00695_pretty_max_column_pad_width.sql b/tests/queries/0_stateless/00695_pretty_max_column_pad_width.sql index 9a421361b71..24dd9336a88 100644 --- a/tests/queries/0_stateless/00695_pretty_max_column_pad_width.sql +++ b/tests/queries/0_stateless/00695_pretty_max_column_pad_width.sql @@ -1,2 +1,3 @@ +SET output_format_pretty_display_footer_column_names=0; SET output_format_pretty_max_column_pad_width = 250; SELECT range(number) FROM system.numbers LIMIT 100 FORMAT PrettyCompactNoEscapes; diff --git a/tests/queries/0_stateless/00813_parse_date_time_best_effort_more.sql b/tests/queries/0_stateless/00813_parse_date_time_best_effort_more.sql index 1e3b24e60c0..7c2b75cdd70 100644 --- a/tests/queries/0_stateless/00813_parse_date_time_best_effort_more.sql +++ b/tests/queries/0_stateless/00813_parse_date_time_best_effort_more.sql @@ -1,3 +1,4 @@ +SET output_format_pretty_display_footer_column_names=0; SELECT s, parseDateTimeBestEffortOrNull(s, 'UTC') AS a, diff --git a/tests/queries/0_stateless/01019_alter_materialized_view_consistent.sh b/tests/queries/0_stateless/01019_alter_materialized_view_consistent.sh index 3a2eac1f38f..26c2e54e637 100755 --- a/tests/queries/0_stateless/01019_alter_materialized_view_consistent.sh +++ b/tests/queries/0_stateless/01019_alter_materialized_view_consistent.sh @@ -54,10 +54,10 @@ function alter_thread() { for i in {0..5}; do ALTER[$i]="ALTER TABLE mv MODIFY QUERY SELECT v == 1 as test, v as case FROM src_a;" done - # Insert 3 ALTERs to src_b, one in the first half of the array and two in arbitrary positions. - ALTER[$RANDOM % 3]="ALTER TABLE mv MODIFY QUERY SELECT v == 2 as test, v as case FROM src_b;" - ALTER[$RANDOM % 6]="ALTER TABLE mv MODIFY QUERY SELECT v == 2 as test, v as case FROM src_b;" - ALTER[$RANDOM % 6]="ALTER TABLE mv MODIFY QUERY SELECT v == 2 as test, v as case FROM src_b;" + # Insert 3 ALTERs to src_b, one at a random position in each third of the array. + ALTER[$RANDOM % 2]="ALTER TABLE mv MODIFY QUERY SELECT v == 2 as test, v as case FROM src_b;" + ALTER[$RANDOM % 2 + 2]="ALTER TABLE mv MODIFY QUERY SELECT v == 2 as test, v as case FROM src_b;" + ALTER[$RANDOM % 2 + 4]="ALTER TABLE mv MODIFY QUERY SELECT v == 2 as test, v as case FROM src_b;" i=0 while true; do diff --git a/tests/queries/0_stateless/01246_buffer_flush.sql b/tests/queries/0_stateless/01246_buffer_flush.sql index 36bcaae383f..66f93371c29 100644 --- a/tests/queries/0_stateless/01246_buffer_flush.sql +++ b/tests/queries/0_stateless/01246_buffer_flush.sql @@ -9,14 +9,14 @@ create table data_01256 as system.numbers Engine=Memory(); select 'min'; create table buffer_01256 as system.numbers Engine=Buffer(currentDatabase(), data_01256, 1, - 2, 100, /* time */ + 5, 100, /* time */ 4, 100, /* rows */ 1, 1e6 /* bytes */ ); insert into buffer_01256 select * from system.numbers limit 5; select count() from data_01256; --- sleep 2 (min time) + 1 (round up) + bias (1) = 4 -select sleepEachRow(2) from numbers(2) FORMAT Null; +-- It is enough to ensure that the buffer will be flushed earlier than 2*min_time (10 sec) +select sleepEachRow(9) FORMAT Null SETTINGS function_sleep_max_microseconds_per_block=10e6; select count() from data_01256; drop table buffer_01256; diff --git a/tests/queries/0_stateless/01293_pretty_max_value_width.sql b/tests/queries/0_stateless/01293_pretty_max_value_width.sql index a8e0f19f58e..f1dc0cd1912 100644 --- a/tests/queries/0_stateless/01293_pretty_max_value_width.sql +++ b/tests/queries/0_stateless/01293_pretty_max_value_width.sql @@ -1,4 +1,5 @@ SET output_format_pretty_color = 1, output_format_pretty_max_value_width_apply_for_single_value = 1, output_format_pretty_row_numbers = 0; +SET output_format_pretty_display_footer_column_names=0; SELECT 'привет' AS x, 'мир' AS y FORMAT Pretty; SET output_format_pretty_max_value_width = 5; diff --git a/tests/queries/0_stateless/01351_parse_date_time_best_effort_us.sql 
b/tests/queries/0_stateless/01351_parse_date_time_best_effort_us.sql index ca3166142f0..f83d5d96384 100644 --- a/tests/queries/0_stateless/01351_parse_date_time_best_effort_us.sql +++ b/tests/queries/0_stateless/01351_parse_date_time_best_effort_us.sql @@ -1,3 +1,4 @@ +SET output_format_pretty_display_footer_column_names=0; SELECT 'parseDateTimeBestEffortUS'; SELECT diff --git a/tests/queries/0_stateless/01509_output_format_pretty_row_numbers.sql b/tests/queries/0_stateless/01509_output_format_pretty_row_numbers.sql index 0a09aef7fb2..04d02b8d389 100644 --- a/tests/queries/0_stateless/01509_output_format_pretty_row_numbers.sql +++ b/tests/queries/0_stateless/01509_output_format_pretty_row_numbers.sql @@ -1,5 +1,6 @@ SET output_format_pretty_color=1; SET output_format_pretty_row_numbers=0; +SET output_format_pretty_display_footer_column_names=0; SELECT * FROM numbers(10) FORMAT Pretty; SELECT * FROM numbers(10) FORMAT PrettyCompact; SELECT * FROM numbers(10) FORMAT PrettyCompactMonoBlock; diff --git a/tests/queries/0_stateless/01553_settings_early_apply.sql b/tests/queries/0_stateless/01553_settings_early_apply.sql index 4c168bdb3a5..821e09f6651 100644 --- a/tests/queries/0_stateless/01553_settings_early_apply.sql +++ b/tests/queries/0_stateless/01553_settings_early_apply.sql @@ -1,3 +1,4 @@ +set output_format_pretty_display_footer_column_names=0; set output_format_write_statistics=0; select * from numbers(100) settings max_result_rows = 1; -- { serverError TOO_MANY_ROWS_OR_BYTES } diff --git a/tests/queries/0_stateless/02375_pretty_formats.sql.j2 b/tests/queries/0_stateless/02375_pretty_formats.sql.j2 index 55462ea6b61..36a4479260c 100644 --- a/tests/queries/0_stateless/02375_pretty_formats.sql.j2 +++ b/tests/queries/0_stateless/02375_pretty_formats.sql.j2 @@ -3,6 +3,6 @@ 'PrettySpaceNoEscapesMonoBlock'] -%} select '{{ format }}'; -select number as x, number + 1 as y from numbers(4) settings max_block_size=2, output_format_pretty_color=1 format {{ format }}; +select number as x, number + 1 as y from numbers(4) settings max_block_size=2, output_format_pretty_color=1, output_format_pretty_display_footer_column_names=0 format {{ format }}; {% endfor -%} diff --git a/tests/queries/0_stateless/02381_parseDateTime64BestEffortUS.sql b/tests/queries/0_stateless/02381_parseDateTime64BestEffortUS.sql index 21dc7b1a990..510ed4b126f 100644 --- a/tests/queries/0_stateless/02381_parseDateTime64BestEffortUS.sql +++ b/tests/queries/0_stateless/02381_parseDateTime64BestEffortUS.sql @@ -1,3 +1,4 @@ +SET output_format_pretty_display_footer_column_names=0; SELECT 'parseDateTime64BestEffortUS'; SELECT diff --git a/tests/queries/0_stateless/02798_explain_settings_not_applied_bug.sql b/tests/queries/0_stateless/02798_explain_settings_not_applied_bug.sql index 5b9976714ea..b0c08134816 100644 --- a/tests/queries/0_stateless/02798_explain_settings_not_applied_bug.sql +++ b/tests/queries/0_stateless/02798_explain_settings_not_applied_bug.sql @@ -1,3 +1,4 @@ +SET output_format_pretty_display_footer_column_names=0; SET output_format_pretty_color=1; SET read_in_order_two_level_merge_threshold=1000000; diff --git a/tests/queries/0_stateless/02942_variant_cast.sql b/tests/queries/0_stateless/02942_variant_cast.sql index fc2d1d63657..33587e3e438 100644 --- a/tests/queries/0_stateless/02942_variant_cast.sql +++ b/tests/queries/0_stateless/02942_variant_cast.sql @@ -1,5 +1,4 @@ set allow_experimental_variant_type=1; -set allow_experimental_analyzer=0; -- It's currently doesn't work with analyzer because of the way it works 
with constants, but it will be refactored and fixed in future select NULL::Variant(String, UInt64); select 42::UInt64::Variant(String, UInt64); diff --git a/tests/queries/0_stateless/02944_variant_as_common_type.sql b/tests/queries/0_stateless/02944_variant_as_common_type.sql index e985cf365dd..49ea5f2769c 100644 --- a/tests/queries/0_stateless/02944_variant_as_common_type.sql +++ b/tests/queries/0_stateless/02944_variant_as_common_type.sql @@ -1,5 +1,3 @@ -set allow_experimental_analyzer=0; -- The result type for if function with constant is different with analyzer. It wil be fixed after refactoring around constants in analyzer. - set allow_experimental_variant_type=1; set use_variant_as_common_type=1; diff --git a/tests/queries/0_stateless/02998_pretty_format_print_readable_number_on_single_value.sql b/tests/queries/0_stateless/02998_pretty_format_print_readable_number_on_single_value.sql index 5dc69488cea..46d6bb657c9 100644 --- a/tests/queries/0_stateless/02998_pretty_format_print_readable_number_on_single_value.sql +++ b/tests/queries/0_stateless/02998_pretty_format_print_readable_number_on_single_value.sql @@ -1,3 +1,4 @@ +SET output_format_pretty_display_footer_column_names=0; SELECT 1_000_000 as a FORMAT Pretty; SELECT 1_000_000 as a FORMAT PrettyNoEscapes; SELECT 1_000_000 as a FORMAT PrettyMonoBlock; diff --git a/tests/queries/0_stateless/03022_highlight_digit_groups.sql b/tests/queries/0_stateless/03022_highlight_digit_groups.sql index c48a02e712f..8c371c409c4 100644 --- a/tests/queries/0_stateless/03022_highlight_digit_groups.sql +++ b/tests/queries/0_stateless/03022_highlight_digit_groups.sql @@ -1,3 +1,4 @@ +SET output_format_pretty_display_footer_column_names=0; SET output_format_pretty_row_numbers = 0; SELECT exp10(number) * (number % 2 ? 
1 : -1) FROM numbers(30) FORMAT PrettySpace SETTINGS output_format_pretty_color = 1; diff --git a/tests/queries/0_stateless/03156_group_concat.reference b/tests/queries/0_stateless/03156_group_concat.reference new file mode 100644 index 00000000000..75b347be0c4 --- /dev/null +++ b/tests/queries/0_stateless/03156_group_concat.reference @@ -0,0 +1,14 @@ +0 95 abc [1,2,3] +1 \N a [993,986,979,972] +2 123 makson95 [] +95123 +abcamakson95 +[1,2,3][993,986,979,972][] +95,123 +abc,a,makson95 +[1,2,3],[993,986,979,972] +\N +951239512395123 +abc,a,makson95,abc,a,makson95,abc,a,makson95 +[1,2,3][993,986,979,972][][1,2,3][993,986,979,972][][1,2,3][993,986,979,972][] +488890 diff --git a/tests/queries/0_stateless/03156_group_concat.sql b/tests/queries/0_stateless/03156_group_concat.sql new file mode 100644 index 00000000000..c14fde8943a --- /dev/null +++ b/tests/queries/0_stateless/03156_group_concat.sql @@ -0,0 +1,40 @@ +DROP TABLE IF EXISTS test_groupConcat; +CREATE TABLE test_groupConcat +( + id UInt64, + p_int Int32 NULL, + p_string String, + p_array Array(Int32) +) ENGINE = MergeTree ORDER BY id; + +SET max_insert_threads = 1, max_threads = 1, min_insert_block_size_rows = 0, min_insert_block_size_bytes = 0; +INSERT INTO test_groupConcat VALUES (0, 95, 'abc', [1, 2, 3]), (1, NULL, 'a', [993, 986, 979, 972]), (2, 123, 'makson95', []); + +SELECT * FROM test_groupConcat; + +SELECT groupConcat(p_int) FROM test_groupConcat; +SELECT groupConcat(p_string) FROM test_groupConcat; +SELECT groupConcat(p_array) FROM test_groupConcat; + +SELECT groupConcat(',')(p_int) FROM test_groupConcat; +SELECT groupConcat(',')(p_string) FROM test_groupConcat; +SELECT groupConcat(',', 2)(p_array) FROM test_groupConcat; + +SELECT groupConcat(p_int) FROM test_groupConcat WHERE id = 1; + +INSERT INTO test_groupConcat VALUES (0, 95, 'abc', [1, 2, 3]), (1, NULL, 'a', [993, 986, 979, 972]), (2, 123, 'makson95', []); +INSERT INTO test_groupConcat VALUES (0, 95, 'abc', [1, 2, 3]), (1, NULL, 'a', [993, 986, 979, 972]), (2, 123, 'makson95', []); + +SELECT groupConcat(p_int) FROM test_groupConcat; +SELECT groupConcat(',')(p_string) FROM test_groupConcat; +SELECT groupConcat(p_array) FROM test_groupConcat; + +SELECT groupConcat(123)(number) FROM numbers(10); -- { serverError ILLEGAL_TYPE_OF_ARGUMENT } +SELECT groupConcat(',', '3')(number) FROM numbers(10); -- { serverError BAD_ARGUMENTS } +SELECT groupConcat(',', 0)(number) FROM numbers(10); -- { serverError BAD_ARGUMENTS } +SELECT groupConcat(',', -1)(number) FROM numbers(10); -- { serverError BAD_ARGUMENTS } +SELECT groupConcat(',', 3, 3)(number) FROM numbers(10); -- { serverError TOO_MANY_ARGUMENTS_FOR_FUNCTION } + +SELECT length(groupConcat(number)) FROM numbers(100000); + +DROP TABLE IF EXISTS test_groupConcat; diff --git a/tests/queries/0_stateless/03156_nullable_number_tips.sql b/tests/queries/0_stateless/03156_nullable_number_tips.sql index e6f2fa36d86..9a494e3292b 100644 --- a/tests/queries/0_stateless/03156_nullable_number_tips.sql +++ b/tests/queries/0_stateless/03156_nullable_number_tips.sql @@ -1,3 +1,4 @@ +SET output_format_pretty_display_footer_column_names=0; SELECT 123456789 AS x FORMAT PrettyCompact; SELECT toNullable(123456789) AS x FORMAT PrettyCompact; SELECT toLowCardinality(toNullable(123456789)) AS x FORMAT PrettyCompact; diff --git a/tests/queries/0_stateless/03160_pretty_format_tty.sh b/tests/queries/0_stateless/03160_pretty_format_tty.sh index bbc4b96eb90..200bd52f3fa 100755 --- a/tests/queries/0_stateless/03160_pretty_format_tty.sh +++ 
b/tests/queries/0_stateless/03160_pretty_format_tty.sh @@ -5,4 +5,4 @@ CUR_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd) . "$CUR_DIR"/../shell_config.sh # default output_format_pretty_max_rows is 10K -$CLICKHOUSE_LOCAL -q "select * from numbers(100e3) format PrettySpace settings max_threads=1" | wc -l +$CLICKHOUSE_LOCAL -q "select * from numbers(100e3) format PrettySpace settings max_threads=1, output_format_pretty_display_footer_column_names=0" | wc -l diff --git a/tests/queries/0_stateless/03167_boom_filter_index_with_map.reference.j2 b/tests/queries/0_stateless/03167_boom_filter_index_with_map.reference.j2 new file mode 100644 index 00000000000..71dc879f28e --- /dev/null +++ b/tests/queries/0_stateless/03167_boom_filter_index_with_map.reference.j2 @@ -0,0 +1,4 @@ +{% for type in ['Int8', 'Int16', 'Int32', 'Int64', 'UInt8', 'UInt16', 'UInt32', 'UInt64'] -%} +{'xxx':56} +{56:'xxx'} +{% endfor -%} diff --git a/tests/queries/0_stateless/03167_boom_filter_index_with_map.sql.j2 b/tests/queries/0_stateless/03167_boom_filter_index_with_map.sql.j2 new file mode 100644 index 00000000000..4147bd84e8e --- /dev/null +++ b/tests/queries/0_stateless/03167_boom_filter_index_with_map.sql.j2 @@ -0,0 +1,31 @@ +DROP TABLE IF EXISTS boom_filter_map_1; +DROP TABLE IF EXISTS boom_filter_map_2; + +{% for type in ['Int8', 'Int16', 'Int32', 'Int64', 'UInt8', 'UInt16', 'UInt32', 'UInt64'] -%} + +CREATE TABLE boom_filter_map_1 +( + `m` Map(String, {{ type }}), + INDEX index_models_value_bloom_filter mapValues(m) TYPE bloom_filter GRANULARITY 1 +) + ENGINE = MergeTree +ORDER BY tuple(); + +CREATE TABLE boom_filter_map_2 +( + `m` Map({{ type }}, String), + INDEX index_models_value_bloom_filter mapKeys(m) TYPE bloom_filter GRANULARITY 1 +) + ENGINE = MergeTree +ORDER BY tuple(); + +INSERT INTO boom_filter_map_1 (m) values (map('xxx', 56)); +INSERT INTO boom_filter_map_2 (m) values (map(56, 'xxx')); + +SELECT m FROM boom_filter_map_1 WHERE (m['xxx']) = 56; +SELECT m FROM boom_filter_map_2 WHERE (m[56]) = 'xxx'; + +DROP TABLE IF EXISTS boom_filter_map_1; +DROP TABLE IF EXISTS boom_filter_map_2; + +{% endfor -%} diff --git a/tests/queries/0_stateless/03169_display_column_names_in_footer.reference b/tests/queries/0_stateless/03169_display_column_names_in_footer.reference new file mode 100644 index 00000000000..7a9f413a900 --- /dev/null +++ b/tests/queries/0_stateless/03169_display_column_names_in_footer.reference @@ -0,0 +1,2382 @@ + ┏━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━┓ + ┃ number ┃ toTypeName(number) ┃ + ┡━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━┩ + 1. │ 0 │ UInt64 │ + ├────────┼────────────────────┤ + 2. │ 1 │ UInt64 │ + ├────────┼────────────────────┤ + 3. │ 2 │ UInt64 │ + ├────────┼────────────────────┤ + 4. │ 3 │ UInt64 │ + ├────────┼────────────────────┤ + 5. │ 4 │ UInt64 │ + ├────────┼────────────────────┤ + 6. │ 5 │ UInt64 │ + ├────────┼────────────────────┤ + 7. │ 6 │ UInt64 │ + ├────────┼────────────────────┤ + 8. │ 7 │ UInt64 │ + ├────────┼────────────────────┤ + 9. │ 8 │ UInt64 │ + ├────────┼────────────────────┤ +10. │ 9 │ UInt64 │ + ├────────┼────────────────────┤ +11. │ 10 │ UInt64 │ + ├────────┼────────────────────┤ +12. │ 11 │ UInt64 │ + ├────────┼────────────────────┤ +13. │ 12 │ UInt64 │ + ├────────┼────────────────────┤ +14. │ 13 │ UInt64 │ + ├────────┼────────────────────┤ +15. │ 14 │ UInt64 │ + ├────────┼────────────────────┤ +16. │ 15 │ UInt64 │ + ├────────┼────────────────────┤ +17. │ 16 │ UInt64 │ + ├────────┼────────────────────┤ +18. │ 17 │ UInt64 │ + ├────────┼────────────────────┤ +19. 
│ 18 │ UInt64 │ + ├────────┼────────────────────┤ +20. │ 19 │ UInt64 │ + ├────────┼────────────────────┤ +21. │ 20 │ UInt64 │ + ├────────┼────────────────────┤ +22. │ 21 │ UInt64 │ + ├────────┼────────────────────┤ +23. │ 22 │ UInt64 │ + ├────────┼────────────────────┤ +24. │ 23 │ UInt64 │ + ├────────┼────────────────────┤ +25. │ 24 │ UInt64 │ + ├────────┼────────────────────┤ +26. │ 25 │ UInt64 │ + ├────────┼────────────────────┤ +27. │ 26 │ UInt64 │ + ├────────┼────────────────────┤ +28. │ 27 │ UInt64 │ + ├────────┼────────────────────┤ +29. │ 28 │ UInt64 │ + ├────────┼────────────────────┤ +30. │ 29 │ UInt64 │ + ├────────┼────────────────────┤ +31. │ 30 │ UInt64 │ + ├────────┼────────────────────┤ +32. │ 31 │ UInt64 │ + ├────────┼────────────────────┤ +33. │ 32 │ UInt64 │ + ├────────┼────────────────────┤ +34. │ 33 │ UInt64 │ + ├────────┼────────────────────┤ +35. │ 34 │ UInt64 │ + ├────────┼────────────────────┤ +36. │ 35 │ UInt64 │ + ├────────┼────────────────────┤ +37. │ 36 │ UInt64 │ + ├────────┼────────────────────┤ +38. │ 37 │ UInt64 │ + ├────────┼────────────────────┤ +39. │ 38 │ UInt64 │ + ├────────┼────────────────────┤ +40. │ 39 │ UInt64 │ + ├────────┼────────────────────┤ +41. │ 40 │ UInt64 │ + ├────────┼────────────────────┤ +42. │ 41 │ UInt64 │ + ├────────┼────────────────────┤ +43. │ 42 │ UInt64 │ + ├────────┼────────────────────┤ +44. │ 43 │ UInt64 │ + ├────────┼────────────────────┤ +45. │ 44 │ UInt64 │ + ├────────┼────────────────────┤ +46. │ 45 │ UInt64 │ + ├────────┼────────────────────┤ +47. │ 46 │ UInt64 │ + ├────────┼────────────────────┤ +48. │ 47 │ UInt64 │ + ├────────┼────────────────────┤ +49. │ 48 │ UInt64 │ + └────────┴────────────────────┘ + ┏━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━┓ + ┃ number ┃ toTypeName(number) ┃ + ┡━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━┩ + 1. │ 0 │ UInt64 │ + ├────────┼────────────────────┤ + 2. │ 1 │ UInt64 │ + ├────────┼────────────────────┤ + 3. │ 2 │ UInt64 │ + ├────────┼────────────────────┤ + 4. │ 3 │ UInt64 │ + ├────────┼────────────────────┤ + 5. │ 4 │ UInt64 │ + ├────────┼────────────────────┤ + 6. │ 5 │ UInt64 │ + ├────────┼────────────────────┤ + 7. │ 6 │ UInt64 │ + ├────────┼────────────────────┤ + 8. │ 7 │ UInt64 │ + ├────────┼────────────────────┤ + 9. │ 8 │ UInt64 │ + ├────────┼────────────────────┤ +10. │ 9 │ UInt64 │ + ┣━━━━━━━━╋━━━━━━━━━━━━━━━━━━━━┫ + ┃ number ┃ toTypeName(number) ┃ + ┗━━━━━━━━┻━━━━━━━━━━━━━━━━━━━━┛ + ┏━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━┓ + ┃ number ┃ toTypeName(number) ┃ + ┡━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━┩ + 1. │ 0 │ UInt64 │ + ├────────┼────────────────────┤ + 2. │ 1 │ UInt64 │ + ├────────┼────────────────────┤ + 3. │ 2 │ UInt64 │ + ├────────┼────────────────────┤ + 4. │ 3 │ UInt64 │ + ├────────┼────────────────────┤ + 5. │ 4 │ UInt64 │ + ├────────┼────────────────────┤ + 6. │ 5 │ UInt64 │ + ├────────┼────────────────────┤ + 7. │ 6 │ UInt64 │ + ├────────┼────────────────────┤ + 8. │ 7 │ UInt64 │ + ├────────┼────────────────────┤ + 9. │ 8 │ UInt64 │ + ├────────┼────────────────────┤ + 10. │ 9 │ UInt64 │ + ├────────┼────────────────────┤ + 11. │ 10 │ UInt64 │ + ├────────┼────────────────────┤ + 12. │ 11 │ UInt64 │ + ├────────┼────────────────────┤ + 13. │ 12 │ UInt64 │ + ├────────┼────────────────────┤ + 14. │ 13 │ UInt64 │ + ├────────┼────────────────────┤ + 15. │ 14 │ UInt64 │ + ├────────┼────────────────────┤ + 16. │ 15 │ UInt64 │ + ├────────┼────────────────────┤ + 17. │ 16 │ UInt64 │ + ├────────┼────────────────────┤ + 18. │ 17 │ UInt64 │ + ├────────┼────────────────────┤ + 19. 
│ 18 │ UInt64 │ + ├────────┼────────────────────┤ + 20. │ 19 │ UInt64 │ + ├────────┼────────────────────┤ + 21. │ 20 │ UInt64 │ + ├────────┼────────────────────┤ + 22. │ 21 │ UInt64 │ + ├────────┼────────────────────┤ + 23. │ 22 │ UInt64 │ + ├────────┼────────────────────┤ + 24. │ 23 │ UInt64 │ + ├────────┼────────────────────┤ + 25. │ 24 │ UInt64 │ + ├────────┼────────────────────┤ + 26. │ 25 │ UInt64 │ + ├────────┼────────────────────┤ + 27. │ 26 │ UInt64 │ + ├────────┼────────────────────┤ + 28. │ 27 │ UInt64 │ + ├────────┼────────────────────┤ + 29. │ 28 │ UInt64 │ + ├────────┼────────────────────┤ + 30. │ 29 │ UInt64 │ + ├────────┼────────────────────┤ + 31. │ 30 │ UInt64 │ + ├────────┼────────────────────┤ + 32. │ 31 │ UInt64 │ + ├────────┼────────────────────┤ + 33. │ 32 │ UInt64 │ + ├────────┼────────────────────┤ + 34. │ 33 │ UInt64 │ + ├────────┼────────────────────┤ + 35. │ 34 │ UInt64 │ + ├────────┼────────────────────┤ + 36. │ 35 │ UInt64 │ + ├────────┼────────────────────┤ + 37. │ 36 │ UInt64 │ + ├────────┼────────────────────┤ + 38. │ 37 │ UInt64 │ + ├────────┼────────────────────┤ + 39. │ 38 │ UInt64 │ + ├────────┼────────────────────┤ + 40. │ 39 │ UInt64 │ + ├────────┼────────────────────┤ + 41. │ 40 │ UInt64 │ + ├────────┼────────────────────┤ + 42. │ 41 │ UInt64 │ + ├────────┼────────────────────┤ + 43. │ 42 │ UInt64 │ + ├────────┼────────────────────┤ + 44. │ 43 │ UInt64 │ + ├────────┼────────────────────┤ + 45. │ 44 │ UInt64 │ + ├────────┼────────────────────┤ + 46. │ 45 │ UInt64 │ + ├────────┼────────────────────┤ + 47. │ 46 │ UInt64 │ + ├────────┼────────────────────┤ + 48. │ 47 │ UInt64 │ + ├────────┼────────────────────┤ + 49. │ 48 │ UInt64 │ + ├────────┼────────────────────┤ + 50. │ 49 │ UInt64 │ + ├────────┼────────────────────┤ + 51. │ 50 │ UInt64 │ + ├────────┼────────────────────┤ + 52. │ 51 │ UInt64 │ + ├────────┼────────────────────┤ + 53. │ 52 │ UInt64 │ + ├────────┼────────────────────┤ + 54. │ 53 │ UInt64 │ + ├────────┼────────────────────┤ + 55. │ 54 │ UInt64 │ + ├────────┼────────────────────┤ + 56. │ 55 │ UInt64 │ + ├────────┼────────────────────┤ + 57. │ 56 │ UInt64 │ + ├────────┼────────────────────┤ + 58. │ 57 │ UInt64 │ + ├────────┼────────────────────┤ + 59. │ 58 │ UInt64 │ + ├────────┼────────────────────┤ + 60. │ 59 │ UInt64 │ + ├────────┼────────────────────┤ + 61. │ 60 │ UInt64 │ + ├────────┼────────────────────┤ + 62. │ 61 │ UInt64 │ + ├────────┼────────────────────┤ + 63. │ 62 │ UInt64 │ + ├────────┼────────────────────┤ + 64. │ 63 │ UInt64 │ + ├────────┼────────────────────┤ + 65. │ 64 │ UInt64 │ + ├────────┼────────────────────┤ + 66. │ 65 │ UInt64 │ + ├────────┼────────────────────┤ + 67. │ 66 │ UInt64 │ + ├────────┼────────────────────┤ + 68. │ 67 │ UInt64 │ + ├────────┼────────────────────┤ + 69. │ 68 │ UInt64 │ + ├────────┼────────────────────┤ + 70. │ 69 │ UInt64 │ + ├────────┼────────────────────┤ + 71. │ 70 │ UInt64 │ + ├────────┼────────────────────┤ + 72. │ 71 │ UInt64 │ + ├────────┼────────────────────┤ + 73. │ 72 │ UInt64 │ + ├────────┼────────────────────┤ + 74. │ 73 │ UInt64 │ + ├────────┼────────────────────┤ + 75. │ 74 │ UInt64 │ + ├────────┼────────────────────┤ + 76. │ 75 │ UInt64 │ + ├────────┼────────────────────┤ + 77. │ 76 │ UInt64 │ + ├────────┼────────────────────┤ + 78. │ 77 │ UInt64 │ + ├────────┼────────────────────┤ + 79. │ 78 │ UInt64 │ + ├────────┼────────────────────┤ + 80. │ 79 │ UInt64 │ + ├────────┼────────────────────┤ + 81. │ 80 │ UInt64 │ + ├────────┼────────────────────┤ + 82. 
│ 81 │ UInt64 │ + ├────────┼────────────────────┤ + 83. │ 82 │ UInt64 │ + ├────────┼────────────────────┤ + 84. │ 83 │ UInt64 │ + ├────────┼────────────────────┤ + 85. │ 84 │ UInt64 │ + ├────────┼────────────────────┤ + 86. │ 85 │ UInt64 │ + ├────────┼────────────────────┤ + 87. │ 86 │ UInt64 │ + ├────────┼────────────────────┤ + 88. │ 87 │ UInt64 │ + ├────────┼────────────────────┤ + 89. │ 88 │ UInt64 │ + ├────────┼────────────────────┤ + 90. │ 89 │ UInt64 │ + ├────────┼────────────────────┤ + 91. │ 90 │ UInt64 │ + ├────────┼────────────────────┤ + 92. │ 91 │ UInt64 │ + ├────────┼────────────────────┤ + 93. │ 92 │ UInt64 │ + ├────────┼────────────────────┤ + 94. │ 93 │ UInt64 │ + ├────────┼────────────────────┤ + 95. │ 94 │ UInt64 │ + ├────────┼────────────────────┤ + 96. │ 95 │ UInt64 │ + ├────────┼────────────────────┤ + 97. │ 96 │ UInt64 │ + ├────────┼────────────────────┤ + 98. │ 97 │ UInt64 │ + ├────────┼────────────────────┤ + 99. │ 98 │ UInt64 │ + ├────────┼────────────────────┤ +100. │ 99 │ UInt64 │ + └────────┴────────────────────┘ + ┏━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━┓ + ┃ number ┃ toTypeName(number) ┃ + ┡━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━┩ + 1. │ 0 │ UInt64 │ + ├────────┼────────────────────┤ + 2. │ 1 │ UInt64 │ + ├────────┼────────────────────┤ + 3. │ 2 │ UInt64 │ + ├────────┼────────────────────┤ + 4. │ 3 │ UInt64 │ + ├────────┼────────────────────┤ + 5. │ 4 │ UInt64 │ + ├────────┼────────────────────┤ + 6. │ 5 │ UInt64 │ + ├────────┼────────────────────┤ + 7. │ 6 │ UInt64 │ + ├────────┼────────────────────┤ + 8. │ 7 │ UInt64 │ + ├────────┼────────────────────┤ + 9. │ 8 │ UInt64 │ + ├────────┼────────────────────┤ + 10. │ 9 │ UInt64 │ + ├────────┼────────────────────┤ + 11. │ 10 │ UInt64 │ + ├────────┼────────────────────┤ + 12. │ 11 │ UInt64 │ + ├────────┼────────────────────┤ + 13. │ 12 │ UInt64 │ + ├────────┼────────────────────┤ + 14. │ 13 │ UInt64 │ + ├────────┼────────────────────┤ + 15. │ 14 │ UInt64 │ + ├────────┼────────────────────┤ + 16. │ 15 │ UInt64 │ + ├────────┼────────────────────┤ + 17. │ 16 │ UInt64 │ + ├────────┼────────────────────┤ + 18. │ 17 │ UInt64 │ + ├────────┼────────────────────┤ + 19. │ 18 │ UInt64 │ + ├────────┼────────────────────┤ + 20. │ 19 │ UInt64 │ + ├────────┼────────────────────┤ + 21. │ 20 │ UInt64 │ + ├────────┼────────────────────┤ + 22. │ 21 │ UInt64 │ + ├────────┼────────────────────┤ + 23. │ 22 │ UInt64 │ + ├────────┼────────────────────┤ + 24. │ 23 │ UInt64 │ + ├────────┼────────────────────┤ + 25. │ 24 │ UInt64 │ + ├────────┼────────────────────┤ + 26. │ 25 │ UInt64 │ + ├────────┼────────────────────┤ + 27. │ 26 │ UInt64 │ + ├────────┼────────────────────┤ + 28. │ 27 │ UInt64 │ + ├────────┼────────────────────┤ + 29. │ 28 │ UInt64 │ + ├────────┼────────────────────┤ + 30. │ 29 │ UInt64 │ + ├────────┼────────────────────┤ + 31. │ 30 │ UInt64 │ + ├────────┼────────────────────┤ + 32. │ 31 │ UInt64 │ + ├────────┼────────────────────┤ + 33. │ 32 │ UInt64 │ + ├────────┼────────────────────┤ + 34. │ 33 │ UInt64 │ + ├────────┼────────────────────┤ + 35. │ 34 │ UInt64 │ + ├────────┼────────────────────┤ + 36. │ 35 │ UInt64 │ + ├────────┼────────────────────┤ + 37. │ 36 │ UInt64 │ + ├────────┼────────────────────┤ + 38. │ 37 │ UInt64 │ + ├────────┼────────────────────┤ + 39. │ 38 │ UInt64 │ + ├────────┼────────────────────┤ + 40. │ 39 │ UInt64 │ + ├────────┼────────────────────┤ + 41. │ 40 │ UInt64 │ + ├────────┼────────────────────┤ + 42. │ 41 │ UInt64 │ + ├────────┼────────────────────┤ + 43. │ 42 │ UInt64 │ + ├────────┼────────────────────┤ + 44. 
│     43 │ UInt64             │
+      ├────────┼────────────────────┤
+  ...
+ 100. │     99 │ UInt64             │
+      ┣━━━━━━━━╋━━━━━━━━━━━━━━━━━━━━┫
+      ┃ number ┃ toTypeName(number) ┃
+      ┗━━━━━━━━┻━━━━━━━━━━━━━━━━━━━━┛
[The rest of the reference output is elided: it repeats the same 100-row number/toTypeName(number) table once per tested format. The full-grid Pretty variants close with the boxed column-name footer shown above; the PrettyCompact variants close with a └─number─┴─toTypeName(number)─┘ footer, except the run with output_format_pretty_display_footer_column_names=0, which keeps the plain └────────┴────────────────────┘ rule; the PrettySpace variants append an unframed "number  toTypeName(number)" footer line, again omitted when the setting is disabled.]
diff --git a/tests/queries/0_stateless/03169_display_column_names_in_footer.sql b/tests/queries/0_stateless/03169_display_column_names_in_footer.sql
new file mode 100644
index 00000000000..9e4ec09c21c
--- /dev/null
+++ b/tests/queries/0_stateless/03169_display_column_names_in_footer.sql
@@ -0,0 +1,19 @@
+-- https://github.com/ClickHouse/ClickHouse/issues/65035
+SELECT *, toTypeName(*) FROM (SELECT * FROM system.numbers LIMIT 49) FORMAT Pretty;
+SELECT *, toTypeName(*) FROM (SELECT * FROM system.numbers LIMIT 10) FORMAT Pretty SETTINGS output_format_pretty_display_footer_column_names_min_rows=9;
+SELECT *, toTypeName(*) FROM (SELECT * FROM system.numbers LIMIT 100) FORMAT Pretty SETTINGS output_format_pretty_display_footer_column_names=0;
+SELECT *, toTypeName(*) FROM (SELECT * FROM system.numbers LIMIT 100) FORMAT Pretty;
+SELECT *, toTypeName(*) FROM (SELECT * FROM system.numbers LIMIT 100) FORMAT PrettyNoEscapes;
+SELECT *, toTypeName(*) FROM (SELECT * FROM system.numbers LIMIT 100) FORMAT PrettyMonoBlock;
+SELECT *, toTypeName(*) FROM (SELECT * FROM system.numbers LIMIT 100) FORMAT PrettyNoEscapesMonoBlock;
+SELECT *, toTypeName(*) FROM (SELECT * FROM system.numbers LIMIT 100) FORMAT PrettyNoEscapesMonoBlock;
+SELECT *, toTypeName(*) FROM (SELECT * FROM system.numbers LIMIT 100) FORMAT PrettyCompact SETTINGS output_format_pretty_display_footer_column_names=0;
+SELECT *, toTypeName(*) FROM (SELECT * FROM system.numbers LIMIT 100) FORMAT PrettyCompact;
+SELECT *, toTypeName(*) FROM (SELECT * FROM system.numbers LIMIT 100) FORMAT PrettyCompactNoEscapes;
+SELECT *, toTypeName(*) FROM (SELECT * FROM system.numbers LIMIT 100) FORMAT PrettyCompactMonoBlock;
+SELECT *, toTypeName(*) FROM (SELECT * FROM system.numbers LIMIT 100) FORMAT PrettyCompactNoEscapesMonoBlock;
+SELECT *, toTypeName(*) FROM (SELECT * FROM system.numbers LIMIT 100) FORMAT PrettySpace SETTINGS output_format_pretty_display_footer_column_names=0;
+SELECT *, toTypeName(*) FROM (SELECT * FROM system.numbers LIMIT 100) FORMAT PrettySpace;
+SELECT *, toTypeName(*) FROM (SELECT * FROM system.numbers LIMIT 100) FORMAT PrettySpaceNoEscapes;
+SELECT *, toTypeName(*) FROM (SELECT * FROM system.numbers LIMIT 100) FORMAT PrettySpaceMonoBlock;
+SELECT *, toTypeName(*) FROM (SELECT * FROM system.numbers LIMIT 100) FORMAT PrettySpaceNoEscapesMonoBlock;
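A minimal sketch of what this test pins down, runnable in clickhouse-client. The settings names are taken from the queries above; the 50-row default threshold is an assumption inferred from the LIMIT 49 case, not something this diff states:

    -- Footer repeats the column names for tall results (assumed default:
    -- output_format_pretty_display_footer_column_names_min_rows = 50).
    SELECT number, toTypeName(number) FROM system.numbers LIMIT 100 FORMAT Pretty;

    -- Just below the threshold: no footer is printed.
    SELECT number, toTypeName(number) FROM system.numbers LIMIT 49 FORMAT Pretty;

    -- Lowering the threshold forces the footer even for a 10-row result.
    SELECT number, toTypeName(number) FROM system.numbers LIMIT 10 FORMAT Pretty
    SETTINGS output_format_pretty_display_footer_column_names_min_rows = 9;

    -- Disabling the setting suppresses the footer regardless of height.
    SELECT number, toTypeName(number) FROM system.numbers LIMIT 100 FORMAT Pretty
    SETTINGS output_format_pretty_display_footer_column_names = 0;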
diff --git a/tests/queries/0_stateless/03171_direct_dict_short_circuit_bug.reference b/tests/queries/0_stateless/03171_direct_dict_short_circuit_bug.reference
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/tests/queries/0_stateless/03171_direct_dict_short_circuit_bug.sql b/tests/queries/0_stateless/03171_direct_dict_short_circuit_bug.sql
new file mode 100644
index 00000000000..034d48d2b8b
--- /dev/null
+++ b/tests/queries/0_stateless/03171_direct_dict_short_circuit_bug.sql
@@ -0,0 +1,37 @@
+-- https://github.com/ClickHouse/ClickHouse/issues/65201
+SET short_circuit_function_evaluation='enable';
+
+DROP DICTIONARY IF EXISTS direct_dictionary_simple_key_simple_attributes;
+DROP TABLE IF EXISTS simple_key_simple_attributes_source_table;
+
+CREATE TABLE simple_key_simple_attributes_source_table
+(
+    id UInt64,
+    value_first String,
+    value_second String
+)
+    ENGINE = TinyLog;
+
+INSERT INTO simple_key_simple_attributes_source_table VALUES(0, 'value_0', 'value_second_0');
+INSERT INTO simple_key_simple_attributes_source_table VALUES(1, 'value_1', 'value_second_1');
+INSERT INTO simple_key_simple_attributes_source_table VALUES(2, 'value_2', 'value_second_2');
+
+
+CREATE DICTIONARY direct_dictionary_simple_key_simple_attributes
+(
+    `id` UInt64,
+    `value_first` String DEFAULT 'value_first_default',
+    `value_second` String DEFAULT 'value_second_default'
+)
+    PRIMARY KEY id
+    SOURCE(CLICKHOUSE(TABLE 'simple_key_simple_attributes_source_table'))
+    LAYOUT(DIRECT());
+
+SELECT
+    toUInt128(1),
+    dictGetOrDefault('direct_dictionary_simple_key_simple_attributes', 'value_second', number, toString(toFixedString(toFixedString(toFixedString(materialize(toNullable('default')), 7), 7), toNullable(toNullable(toNullable(toUInt128(7))))))) AS value_second
+FROM system.numbers LIMIT 255
+FORMAT Null;
+
+DROP DICTIONARY IF EXISTS direct_dictionary_simple_key_simple_attributes;
+DROP TABLE IF EXISTS simple_key_simple_attributes_source_table;
diff --git a/tests/queries/0_stateless/03172_bcrypt_validation.reference b/tests/queries/0_stateless/03172_bcrypt_validation.reference
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/tests/queries/0_stateless/03172_bcrypt_validation.sql b/tests/queries/0_stateless/03172_bcrypt_validation.sql
new file mode 100644
index 00000000000..37dd0c9bb5d
--- /dev/null
+++ b/tests/queries/0_stateless/03172_bcrypt_validation.sql
@@ -0,0 +1,3 @@
+-- Tags: no-fasttest
+DROP USER IF EXISTS 03172_user_invalid_bcrypt_hash;
+CREATE USER 03172_user_invalid_bcrypt_hash IDENTIFIED WITH bcrypt_hash BY '012345678901234567890123456789012345678901234567890123456789'; -- { serverError BAD_ARGUMENTS }
diff --git a/utils/check-style/check-mypy b/utils/check-style/check-mypy
index 42cb7fbbd15..4434377e627 100755
--- a/utils/check-style/check-mypy
+++ b/utils/check-style/check-mypy
@@ -11,13 +11,15 @@ GIT_ROOT=${GIT_ROOT:-.}
 CONFIG="$GIT_ROOT/tests/ci/.mypy.ini"
 DIRS=("$GIT_ROOT/tests/ci/" "$GIT_ROOT/tests/ci/"*/)
 tmp=$(mktemp)
+
 for dir in "${DIRS[@]}"; do
     if ! compgen -G "$dir"/*.py > /dev/null; then
         continue
     fi
-    if ! mypy --config-file="$CONFIG" --sqlite-cache "$dir"/*.py > "$tmp" 2>&1; then
+    if ! mypy --config-file="$CONFIG" --sqlite-cache $(find "$dir" -maxdepth 1 -name "*.py" | grep -v "test_") > "$tmp" 2>&1; then
         echo "Errors while processing $dir":
         cat "$tmp"
     fi
 done
+
 rm -rf "$tmp"
diff --git a/utils/check-style/check-pylint b/utils/check-style/check-pylint
index 7959a414023..8cfbc68ac96 100755
--- a/utils/check-style/check-pylint
+++ b/utils/check-style/check-pylint
@@ -10,6 +10,7 @@ function xargs-pylint {
     xargs -P "$(nproc)" -n "$1" pylint --rcfile="$ROOT_PATH/pyproject.toml" --persistent=no --score=n
 }

-find "$ROOT_PATH/tests" -maxdepth 2 -type f -exec file -F' ' --mime-type {} + | xargs-pylint 50
+# exclude ci unittest scripts from check: test_*
+find "$ROOT_PATH/tests" -maxdepth 2 -type f -exec file -F' ' --mime-type {} + | grep -v "/test_" | xargs-pylint 50
 # Beware, there lambdas are checked. All of them contain `app`, and it causes brain-cucumber-zalgo
 find "$ROOT_PATH/tests/ci" -mindepth 2 -type f -exec file -F' ' --mime-type {} + | xargs-pylint 1
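For context on the 03171 test above: a reduced sketch of the scenario it guards, assuming the table and dictionary created by that test still exist. The deeply nested toFixedString/toNullable wrapping in the test only serves to stress the lazily evaluated default branch; a simpler default expression exercises the same path:

    SET short_circuit_function_evaluation = 'enable';

    -- Keys 0..2 exist in the source table; for every other key the default
    -- expression must be evaluated lazily, which is the code path the
    -- referenced issue (#65201) covers.
    SELECT
        number AS key,
        dictGetOrDefault(
            'direct_dictionary_simple_key_simple_attributes',
            'value_second',
            number,
            concat('default_', toString(number))
        ) AS value_second
    FROM system.numbers
    LIMIT 10;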