#pragma once

#include <Columns/ColumnArray.h>
#include <Common/assert_cast.h>
#include <DataTypes/DataTypeArray.h>
#include <AggregateFunctions/IAggregateFunction.h>

#include <IO/WriteBuffer.h>
#include <IO/ReadBuffer.h>
#include <IO/WriteHelpers.h>
#include <IO/ReadHelpers.h>

namespace DB
{

namespace ErrorCodes
{
    extern const int ILLEGAL_TYPE_OF_ARGUMENT;
    extern const int NUMBER_OF_ARGUMENTS_DOESNT_MATCH;
    extern const int SIZES_OF_ARRAYS_DOESNT_MATCH;
}

struct AggregateFunctionForEachData
{
    size_t dynamic_array_size = 0;
    char * array_of_aggregate_datas = nullptr;
};

/** Adaptor for aggregate functions.
  * Adding the -ForEach suffix to an aggregate function
  * converts it into a function that accepts arrays
  * and applies the aggregation to the corresponding elements of those arrays independently,
  * returning an array of aggregated values at the corresponding positions.
  *
  * Example: sumForEach of:
  *  [1, 2],
  *  [3, 4, 5],
  *  [6, 7]
  * will return:
  *  [10, 13, 5]
  *
  * TODO Allow variable number of arguments.
  */
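/** Illustrative usage sketch (added for clarity, not taken from this header; the table and
  * column names below are assumptions): with a table `t` that has a column `arr Array(UInt32)`,
  *     SELECT sumForEach(arr) FROM t
  * sums the arrays element-wise across all rows and returns one Array whose i-th element is
  * the sum of the i-th elements of the inputs; shorter arrays simply stop contributing past
  * their length, as in the [10, 13, 5] example above.
  */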
class AggregateFunctionForEach final : public IAggregateFunctionDataHelper<AggregateFunctionForEachData, AggregateFunctionForEach>
{
private:
    AggregateFunctionPtr nested_func;
    size_t nested_size_of_data = 0;
    size_t num_arguments;

    AggregateFunctionForEachData & ensureAggregateData(AggregateDataPtr place, size_t new_size, Arena & arena) const
    {
        AggregateFunctionForEachData & state = data(place);

        /// Ensure we have aggregate states for new_size elements, allocating
        /// from the arena if needed. When reallocating, we can't copy the
        /// states to the new buffer with memcpy, because they may contain pointers
        /// to themselves. In particular, this happens when a state contains
        /// a PODArrayWithStackMemory, which stores a small number of elements
        /// inline. This is why we create new empty states in the new buffer
        /// and merge the old states into them.
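        /// Minimal illustration of that hazard (a sketch added for clarity; the struct below is
        /// hypothetical, not a real ClickHouse type):
        ///     struct InlineState { char buf[8]; char * pos = buf; };
        /// memcpy-ing such a state to a new address leaves `pos` pointing into the old object's
        /// `buf`, so the relocated state would be corrupted. Creating fresh states and merging
        /// the old ones into them avoids assuming states are trivially relocatable.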
        size_t old_size = state.dynamic_array_size;
        if (old_size < new_size)
        {
            char * old_state = state.array_of_aggregate_datas;
            char * new_state = arena.alignedAlloc(
                new_size * nested_size_of_data,
                nested_func->alignOfData());

            size_t i;
            try
            {
                for (i = 0; i < new_size; ++i)
                {
                    nested_func->create(&new_state[i * nested_size_of_data]);
                }
            }
            catch (...)
            {
                size_t cleanup_size = i;

                for (i = 0; i < cleanup_size; ++i)
                {
                    nested_func->destroy(&new_state[i * nested_size_of_data]);
                }

                throw;
            }

            for (i = 0; i < old_size; ++i)
            {
                nested_func->merge(&new_state[i * nested_size_of_data],
                    &old_state[i * nested_size_of_data],
                    &arena);
            }

            state.array_of_aggregate_datas = new_state;
            state.dynamic_array_size = new_size;
        }

        return state;
    }

public:
    AggregateFunctionForEach(AggregateFunctionPtr nested_, const DataTypes & arguments)
        : IAggregateFunctionDataHelper<AggregateFunctionForEachData, AggregateFunctionForEach>(arguments, {})
        , nested_func(nested_), num_arguments(arguments.size())
    {
        nested_size_of_data = nested_func->sizeOfData();

        if (arguments.empty())
            throw Exception("Aggregate function " + getName() + " requires at least one argument", ErrorCodes::NUMBER_OF_ARGUMENTS_DOESNT_MATCH);

        for (const auto & type : arguments)
            if (!isArray(type))
                throw Exception("All arguments for aggregate function " + getName() + " must be arrays", ErrorCodes::ILLEGAL_TYPE_OF_ARGUMENT);
    }

    String getName() const override
    {
        return nested_func->getName() + "ForEach";
    }

    DataTypePtr getReturnType() const override
    {
        return std::make_shared<DataTypeArray>(nested_func->getReturnType());
    }

    void destroy(AggregateDataPtr place) const noexcept override
    {
        AggregateFunctionForEachData & state = data(place);

        char * nested_state = state.array_of_aggregate_datas;
        for (size_t i = 0; i < state.dynamic_array_size; ++i)
        {
            nested_func->destroy(nested_state);
            nested_state += nested_size_of_data;
        }
    }

    bool hasTrivialDestructor() const override
    {
        return nested_func->hasTrivialDestructor();
    }

    void add(AggregateDataPtr place, const IColumn ** columns, size_t row_num, Arena * arena) const override
    {
        const IColumn * nested[num_arguments];

        for (size_t i = 0; i < num_arguments; ++i)
            nested[i] = &assert_cast<const ColumnArray &>(*columns[i]).getData();

        const ColumnArray & first_array_column = assert_cast<const ColumnArray &>(*columns[0]);
        const IColumn::Offsets & offsets = first_array_column.getOffsets();

        size_t begin = offsets[row_num - 1];
        size_t end = offsets[row_num];

        /// Sanity check. NOTE: we can implement a specialization for the single-argument case if this check hurts performance.
        for (size_t i = 1; i < num_arguments; ++i)
        {
            const ColumnArray & ith_column = assert_cast<const ColumnArray &>(*columns[i]);
            const IColumn::Offsets & ith_offsets = ith_column.getOffsets();

            if (ith_offsets[row_num] != end || (row_num != 0 && ith_offsets[row_num - 1] != begin))
                throw Exception("Arrays passed to " + getName() + " aggregate function have different sizes", ErrorCodes::SIZES_OF_ARRAYS_DOESNT_MATCH);
        }

        AggregateFunctionForEachData & state = ensureAggregateData(place, end - begin, *arena);

        char * nested_state = state.array_of_aggregate_datas;
        for (size_t i = begin; i < end; ++i)
        {
            nested_func->add(nested_state, nested, i, arena);
            nested_state += nested_size_of_data;
        }
    }

    void merge(AggregateDataPtr place, ConstAggregateDataPtr rhs, Arena * arena) const override
    {
        const AggregateFunctionForEachData & rhs_state = data(rhs);
        AggregateFunctionForEachData & state = ensureAggregateData(place, rhs_state.dynamic_array_size, *arena);

        const char * rhs_nested_state = rhs_state.array_of_aggregate_datas;
        char * nested_state = state.array_of_aggregate_datas;

        for (size_t i = 0; i < state.dynamic_array_size && i < rhs_state.dynamic_array_size; ++i)
        {
            nested_func->merge(nested_state, rhs_nested_state, arena);

            rhs_nested_state += nested_size_of_data;
            nested_state += nested_size_of_data;
        }
    }

    void serialize(ConstAggregateDataPtr place, WriteBuffer & buf) const override
    {
        const AggregateFunctionForEachData & state = data(place);
        writeBinary(state.dynamic_array_size, buf);

        const char * nested_state = state.array_of_aggregate_datas;
        for (size_t i = 0; i < state.dynamic_array_size; ++i)
        {
            nested_func->serialize(nested_state, buf);
            nested_state += nested_size_of_data;
        }
    }

    void deserialize(AggregateDataPtr place, ReadBuffer & buf, Arena * arena) const override
    {
        AggregateFunctionForEachData & state = data(place);

        size_t new_size = 0;
        readBinary(new_size, buf);

        ensureAggregateData(place, new_size, *arena);

        char * nested_state = state.array_of_aggregate_datas;
        for (size_t i = 0; i < new_size; ++i)
        {
            nested_func->deserialize(nested_state, buf, arena);
            nested_state += nested_size_of_data;
        }
    }

    void insertResultInto(AggregateDataPtr place, IColumn & to) const override
    {
        AggregateFunctionForEachData & state = data(place);

        ColumnArray & arr_to = assert_cast<ColumnArray &>(to);
        ColumnArray::Offsets & offsets_to = arr_to.getOffsets();
        IColumn & elems_to = arr_to.getData();

        char * nested_state = state.array_of_aggregate_datas;
        for (size_t i = 0; i < state.dynamic_array_size; ++i)
        {
            nested_func->insertResultInto(nested_state, elems_to);
            nested_state += nested_size_of_data;
        }

        offsets_to.push_back(offsets_to.back() + state.dynamic_array_size);
    }

    bool allocatesMemoryInArena() const override
    {
        return true;
    }
};

}