Mirror of https://github.com/ClickHouse/ClickHouse.git
Make building column arrays for chunk processing straightforward

commit 589104fa6e
parent 24f3a6905f
@@ -46,58 +46,73 @@ DistinctSortedTransform::DistinctSortedTransform(
 void DistinctSortedTransform::transform(Chunk & chunk)
 {
-    const ColumnRawPtrs column_ptrs(getKeyColumns(chunk));
-    if (column_ptrs.empty())
-        return;
+    if (!chunk.hasRows())
+        return;
 
-    ColumnRawPtrs clearing_hint_columns(getClearingColumns(column_ptrs));
+    /// get DISTINCT columns from chunk
+    ColumnRawPtrs column_ptrs;
+    column_ptrs.reserve(column_positions.size());
+    for (const auto pos : column_positions)
+    {
+        const auto & column = chunk.getColumns()[pos];
+        column_ptrs.emplace_back(column.get());
+    }
 
-    if (data.type == ClearableSetVariants::Type::EMPTY)
-        data.init(ClearableSetVariants::chooseMethod(column_ptrs, key_sizes));
+    /// get DISTINCT columns from chunk which form sort prefix of sort description
+    ColumnRawPtrs clearing_hint_columns;
+    clearing_hint_columns.reserve(sort_prefix_positions.size());
+    for (const auto pos : sort_prefix_positions)
+    {
+        const auto & column = chunk.getColumns()[pos];
+        clearing_hint_columns.emplace_back(column.get());
+    }
 
-    const size_t rows = chunk.getNumRows();
-    IColumn::Filter filter(rows);
+    if (data.type == ClearableSetVariants::Type::EMPTY)
+        data.init(ClearableSetVariants::chooseMethod(column_ptrs, key_sizes));
 
-    bool has_new_data = false;
-    switch (data.type)
-    {
-        case ClearableSetVariants::Type::EMPTY:
-            break;
-    #define M(NAME) \
-        case ClearableSetVariants::Type::NAME: \
-            has_new_data = buildFilter(*data.NAME, column_ptrs, clearing_hint_columns, filter, rows, data); \
-            break;
-        APPLY_FOR_SET_VARIANTS(M)
-    #undef M
-    }
+    const size_t rows = chunk.getNumRows();
+    IColumn::Filter filter(rows);
 
-    /// Just go to the next block if there isn't any new record in the current one.
-    if (!has_new_data)
-    {
-        chunk.clear();
-        return;
-    }
+    bool has_new_data = false;
+    switch (data.type)
+    {
+        case ClearableSetVariants::Type::EMPTY:
+            break;
+    #define M(NAME) \
+        case ClearableSetVariants::Type::NAME: \
+            has_new_data = buildFilter(*data.NAME, column_ptrs, clearing_hint_columns, filter, rows, data); \
+            break;
+        APPLY_FOR_SET_VARIANTS(M)
+    #undef M
+    }
 
-    if (!set_size_limits.check(data.getTotalRowCount(), data.getTotalByteCount(), "DISTINCT", ErrorCodes::SET_SIZE_LIMIT_EXCEEDED))
-    {
-        stopReading();
-        chunk.clear();
-        return;
-    }
+    /// Just go to the next block if there isn't any new record in the current one.
+    if (!has_new_data)
+    {
+        chunk.clear();
+        return;
+    }
 
-    /// Stop reading if we already reached the limit.
-    if (limit_hint && data.getTotalRowCount() >= limit_hint)
-        stopReading();
+    if (!set_size_limits.check(data.getTotalRowCount(), data.getTotalByteCount(), "DISTINCT", ErrorCodes::SET_SIZE_LIMIT_EXCEEDED))
+    {
+        stopReading();
+        chunk.clear();
+        return;
+    }
 
-    prev_chunk.chunk = std::move(chunk);
-    prev_chunk.clearing_hint_columns = std::move(clearing_hint_columns);
+    /// Stop reading if we already reached the limit.
+    if (limit_hint && data.getTotalRowCount() >= limit_hint)
+        stopReading();
 
-    size_t all_columns = prev_chunk.chunk.getNumColumns();
-    Chunk res_chunk;
-    for (size_t i = 0; i < all_columns; ++i)
-        res_chunk.addColumn(prev_chunk.chunk.getColumns().at(i)->filter(filter, -1));
+    prev_chunk.chunk = std::move(chunk);
+    prev_chunk.clearing_hint_columns = std::move(clearing_hint_columns);
 
-    chunk = std::move(res_chunk);
+    size_t all_columns = prev_chunk.chunk.getNumColumns();
+    Chunk res_chunk;
+    for (size_t i = 0; i < all_columns; ++i)
+        res_chunk.addColumn(prev_chunk.chunk.getColumns().at(i)->filter(filter, -1));
 
+    chunk = std::move(res_chunk);
 }
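For readers outside the ClickHouse codebase, the shape of the change in the hunk above can be illustrated with a small self-contained sketch. MiniColumn, MiniChunk and gatherColumns below are hypothetical stand-ins, not ClickHouse's Chunk/IColumn API; the point is only the pattern the commit inlines into transform(): walk a list of column positions and collect raw pointers into a flat array before processing.

#include <cstddef>
#include <iostream>
#include <memory>
#include <string>
#include <vector>

// Hypothetical stand-ins, just enough to show the pointer-gathering pattern.
struct MiniColumn
{
    std::string name;
    std::vector<int> values;
};

struct MiniChunk
{
    std::vector<std::shared_ptr<MiniColumn>> columns;
    const std::vector<std::shared_ptr<MiniColumn>> & getColumns() const { return columns; }
    bool hasRows() const { return !columns.empty() && !columns.front()->values.empty(); }
};

using ColumnRawPtrs = std::vector<const MiniColumn *>;

// The inlined pattern: build the raw-pointer array directly from a list of
// positions, no dedicated helper method on the transform needed.
ColumnRawPtrs gatherColumns(const MiniChunk & chunk, const std::vector<size_t> & positions)
{
    ColumnRawPtrs ptrs;
    ptrs.reserve(positions.size());
    for (const auto pos : positions)
    {
        const auto & column = chunk.getColumns()[pos];
        ptrs.emplace_back(column.get());
    }
    return ptrs;
}

int main()
{
    MiniChunk chunk;
    chunk.columns = {
        std::make_shared<MiniColumn>(MiniColumn{"a", {1, 2, 3}}),
        std::make_shared<MiniColumn>(MiniColumn{"b", {4, 5, 6}}),
        std::make_shared<MiniColumn>(MiniColumn{"c", {7, 8, 9}}),
    };

    if (!chunk.hasRows())
        return 0;

    // DISTINCT key columns at positions 0 and 2, analogous to column_positions above.
    const ColumnRawPtrs key_columns = gatherColumns(chunk, {0, 2});
    for (const auto * col : key_columns)
        std::cout << col->name << " has " << col->values.size() << " rows\n";
}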
@@ -142,28 +157,6 @@ bool DistinctSortedTransform::buildFilter(
     return has_new_data;
 }
 
-ColumnRawPtrs DistinctSortedTransform::getKeyColumns(const Chunk & chunk) const
-{
-    ColumnRawPtrs column_ptrs;
-    column_ptrs.reserve(column_positions.size());
-    for (const auto pos : column_positions)
-    {
-        const auto & column = chunk.getColumns()[pos];
-        column_ptrs.emplace_back(column.get());
-    }
-    return column_ptrs;
-}
-
-ColumnRawPtrs DistinctSortedTransform::getClearingColumns(const ColumnRawPtrs & key_columns) const
-{
-    ColumnRawPtrs clearing_hint_columns;
-    clearing_hint_columns.reserve(sort_prefix_positions.size());
-    for (const auto pos : sort_prefix_positions)
-        clearing_hint_columns.emplace_back(key_columns[pos]);
-
-    return clearing_hint_columns;
-}
-
 bool DistinctSortedTransform::rowsEqual(const ColumnRawPtrs & lhs, size_t n, const ColumnRawPtrs & rhs, size_t m)
 {
     for (size_t column_index = 0, num_columns = lhs.size(); column_index < num_columns; ++column_index)
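For context, buildFilter (whose tail appears in the hunk above) fills an IColumn::Filter, a per-row 0/1 mask that transform() then applies to every column of the chunk via filter(filter, -1). A loose stand-alone sketch of that mask-and-filter step, using plain vectors instead of the ClickHouse column classes:

#include <cstdint>
#include <iostream>
#include <vector>

// Keep only the rows whose mask entry is non-zero; conceptually what
// IColumn::filter does for one column.
template <typename T>
std::vector<T> filterColumn(const std::vector<T> & column, const std::vector<uint8_t> & mask)
{
    std::vector<T> result;
    for (size_t i = 0; i < column.size(); ++i)
        if (mask[i])
            result.push_back(column[i]);
    return result;
}

int main()
{
    // Two columns of the same chunk and a row mask produced by some
    // deduplication step (1 = first occurrence, 0 = duplicate).
    const std::vector<int> col_a = {10, 10, 20, 20, 30};
    const std::vector<int> col_b = {1, 1, 2, 2, 3};
    const std::vector<uint8_t> mask = {1, 0, 1, 0, 1};

    for (int v : filterColumn(col_a, mask))
        std::cout << v << ' ';
    std::cout << '\n';
    for (int v : filterColumn(col_b, mask))
        std::cout << v << ' ';
    std::cout << '\n';
}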
@@ -32,10 +32,6 @@ protected:
     void transform(Chunk & chunk) override;
 
 private:
-    ColumnRawPtrs getKeyColumns(const Chunk & chunk) const;
-    /// When clearing_columns changed, we can clean HashSet to memory optimization
-    /// clearing_columns is a left-prefix of SortDescription exists in key_columns
-    ColumnRawPtrs getClearingColumns(const ColumnRawPtrs & key_columns) const;
     static bool rowsEqual(const ColumnRawPtrs & lhs, size_t n, const ColumnRawPtrs & rhs, size_t m);
 
     /// return true if has new data
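The two comments removed from the header above describe the idea behind the clearing-hint columns: they are a left prefix of the sort description, and whenever their values change the deduplication set can be emptied, since equal DISTINCT keys cannot span two different sort-prefix values. A rough self-contained sketch of that idea, using plain std containers rather than the ClickHouse ClearableSet classes:

#include <iostream>
#include <set>
#include <string>
#include <utility>
#include <vector>

int main()
{
    // Rows already sorted by the first field; pairs of (sort_prefix, distinct_key).
    const std::vector<std::pair<int, std::string>> rows = {
        {1, "a"}, {1, "a"}, {1, "b"},
        {2, "a"}, {2, "a"},
        {3, "b"}, {3, "b"}, {3, "c"},
    };

    std::set<std::string> seen;   // stands in for the clearable hash set
    bool has_prev = false;
    int prev_prefix = 0;

    for (const auto & [prefix, key] : rows)
    {
        // The "clearing hint": once the sorted prefix changes, no earlier
        // key can repeat, so the set can be dropped to save memory.
        if (has_prev && prefix != prev_prefix)
            seen.clear();
        has_prev = true;
        prev_prefix = prefix;

        if (seen.insert(key).second)   // first occurrence of this (prefix, key)
            std::cout << prefix << " " << key << "\n";
    }
}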