// ClickHouse/src/Common/ExponentiallySmoothedCounter.h
#pragma once
#include <cmath>
#include <limits>
namespace DB
{
/** https://en.wikipedia.org/wiki/Exponential_smoothing
  *
  * Exponentially smoothed average over time is weighted average with weight proportional to negative exponent of the time passed.
  * For example, the last value is taken with weight 1/2, the value one second ago with weight 1/4, two seconds ago - 1/8, etc.
  * It can be understood as an average over sliding window, but with different kernel.
  *
  * As an advantage, it is easy to update. Instead of collecting values and calculating a series of x1 / 2 + x2 / 4 + x3 / 8...
  * just calculate x_old / 2 + x_new / 2.
  *
  * It is often used for resource usage metrics. For example, "load average" in Linux is exponentially smoothed moving average.
  * We can use exponentially smoothed counters in query scheduler.
  */
struct ExponentiallySmoothedAverage
{
    /// Sum of weighted values, not yet normalized; divide by sumWeights() to obtain the average.
    double value = 0;

    /// Moment (in seconds, arbitrary epoch) relative to which `value` is weighted.
    double update_time = 0;

    /// NSDMIs above already zero-initialize the members; a defaulted constructor is
    /// equivalent to the previous empty body and keeps the type cheaply constructible.
    ExponentiallySmoothedAverage() = default;

    ExponentiallySmoothedAverage(double current_value, double current_time)
        : value(current_value), update_time(current_time)
    {
    }

    /// Weight of a value observed `time_passed` seconds ago, relative to now.
    /// The weight halves every `half_decay_time` seconds: 2^(-time_passed / half_decay_time).
    static double scale(double time_passed, double half_decay_time)
    {
        return exp2(-time_passed / half_decay_time);
    }

    /// Sum of weights of an infinite series of values sampled every second:
    /// 1 + k + k^2 + ... = 1 / (1 - k), where k = scale(1.0, half_decay_time).
    /// Used to normalize `value` into an average in get().
    static double sumWeights(double half_decay_time)
    {
        double k = scale(1.0, half_decay_time);
        return 1 / (1 - k);
    }

    /// Obtain the same counter as if it had been updated at `current_time`:
    /// the accumulated value decays according to the time passed since `update_time`.
    ExponentiallySmoothedAverage remap(double current_time, double half_decay_time) const
    {
        return ExponentiallySmoothedAverage(value * scale(current_time - update_time, half_decay_time), current_time);
    }

    /// Merge two counters: the older one is remapped to the newer one's update time
    /// (so its value decays appropriately) and the values are summed.
    static ExponentiallySmoothedAverage merge(const ExponentiallySmoothedAverage & a, const ExponentiallySmoothedAverage & b, double half_decay_time)
    {
        if (a.update_time > b.update_time)
            return ExponentiallySmoothedAverage(a.value + b.remap(a.update_time, half_decay_time).value, a.update_time);
        if (a.update_time < b.update_time)
            return ExponentiallySmoothedAverage(b.value + a.remap(b.update_time, half_decay_time).value, b.update_time);

        return ExponentiallySmoothedAverage(a.value + b.value, a.update_time);
    }

    /// In-place variant of merge().
    void merge(const ExponentiallySmoothedAverage & other, double half_decay_time)
    {
        *this = merge(*this, other, half_decay_time);
    }

    /// Record a new observation `new_value` at `current_time`.
    void add(double new_value, double current_time, double half_decay_time)
    {
        merge(ExponentiallySmoothedAverage(new_value, current_time), half_decay_time);
    }

    /// Normalized average as of the last update time.
    double get(double half_decay_time) const
    {
        return value / sumWeights(half_decay_time);
    }

    /// Normalized average as of `current_time` (the value decays toward zero with time).
    double get(double current_time, double half_decay_time) const
    {
        return remap(current_time, half_decay_time).get(half_decay_time);
    }

    /// Compare two counters at a common point in time (other's update_time).
    bool less(const ExponentiallySmoothedAverage & other, double half_decay_time) const
    {
        return remap(other.update_time, half_decay_time).value < other.value;
    }
};
}