Update perf tests: first reset every <test> header, then run the query below to regenerate per-test thresholds
sed -i 's/^<test.*$/<test>/g' tests/performance/*.xml
-- One-off maintenance query (ClickHouse dialect): after the sed command above
-- has reset every performance-test header to a bare <test>, this query emits
-- one sed command per test (column s) that writes back a per-test
-- max_ignored_relative_change threshold derived from historical run metrics.
--
-- h: per-test threshold = the 0.95 quantile of stat_threshold (q[3] -- arrays
-- are 1-indexed in ClickHouse, so q[3] is the third of quantiles(0, 0.5,
-- 0.95, 1)), taken as max over the test's queries and rounded UP to one
-- decimal place by ceil(x, 1).
WITH ceil(max(q[3]), 1) AS h
SELECT concat('sed -i s\'/^<test.*$/<test max_ignored_relative_change="', toString(h), '">/g\' tests/performance/', test, '.xml') AS s
FROM
(
SELECT
test,
query_index,
count(*),
min(event_time),
max(event_time) AS t,
-- q: quantiles 0 / 0.5 / 0.95 / 1 of stat_threshold, truncated to 3 decimals
arrayMap(x -> floor(x, 3), quantiles(0, 0.5, 0.95, 1)(stat_threshold)) AS q,
median(stat_threshold) AS m
FROM perftest.query_metrics_v2
-- Restrict to PR runs (pr_number != 0) since 2021, client-side timing only,
-- and queries whose result barely changed (|diff| < 5%) and are not trivially
-- fast (old_value > 0.1 -- presumably seconds; confirm against the
-- query_metrics_v2 schema).
WHERE ((pr_number != 0) AND (event_date > '2021-01-01')) AND (metric = 'client_time') AND (abs(diff) < 0.05) AND (old_value > 0.1)
GROUP BY
test,
query_index,
query_display_name
-- Keep only (test, query) pairs with recent data and a non-negligible
-- 0.95-quantile threshold; otherwise the default header is left alone.
HAVING (t > '2021-01-01 00:00:00') AND ((q[3]) > 0.1)
ORDER BY test DESC
)
GROUP BY test
ORDER BY h DESC
FORMAT PrettySpace
<test max_ignored_relative_change="0.2">
    <create_query>
        CREATE TABLE hits_wide AS hits_10m_single ENGINE = MergeTree()
        PARTITION BY toYYYYMM(EventDate)
        ORDER BY (CounterID, EventDate, intHash32(UserID))
        SAMPLE BY intHash32(UserID)
        SETTINGS min_rows_for_wide_part = 0, min_bytes_for_wide_part = 0
    </create_query>

    <create_query>
        CREATE TABLE hits_compact AS hits_10m_single ENGINE = MergeTree()
        PARTITION BY toYYYYMM(EventDate)
        ORDER BY (CounterID, EventDate, intHash32(UserID))
        SAMPLE BY intHash32(UserID)
        SETTINGS min_bytes_for_wide_part = '10M'
    </create_query>

    <create_query>
        CREATE TABLE hits_memory AS hits_10m_single ENGINE = MergeTree()
        PARTITION BY toYYYYMM(EventDate)
        ORDER BY (CounterID, EventDate, intHash32(UserID))
        SAMPLE BY intHash32(UserID)
        SETTINGS min_bytes_for_compact_part = '1M', min_bytes_for_wide_part = '10M', in_memory_parts_enable_wal = 1
    </create_query>

    <create_query>
        CREATE TABLE hits_buffer AS hits_10m_single
        ENGINE = Buffer(default, hits_wide, 1, 0, 0, 10000, 10000, 0, 0)
    </create_query>

    <!-- Emulate writing many parts with 1 row, because single insert query is too fast -->
    <settings>
        <max_block_size>1</max_block_size>
        <min_insert_block_size_rows>1</min_insert_block_size_rows>
    </settings>

    <!-- 100 parts -->
    <query>INSERT INTO hits_wide(UserID) SELECT rand() FROM numbers(100)</query>
    <query>INSERT INTO hits_compact(UserID) SELECT rand() FROM numbers(1000)</query>
    <query>INSERT INTO hits_buffer(UserID) SELECT rand() FROM numbers(100)</query>

    <drop_query>DROP TABLE IF EXISTS hits_wide</drop_query>
    <drop_query>DROP TABLE IF EXISTS hits_compact</drop_query>
    <drop_query>DROP TABLE IF EXISTS hits_memory</drop_query>
    <drop_query>DROP TABLE IF EXISTS hits_buffer</drop_query>
</test>