added offset for limit by

CurtizJ 2019-04-29 16:12:39 +03:00
parent a76e504f45
commit 7766f5c0c3
8 changed files with 70 additions and 26 deletions
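In short, LIMIT BY gains an optional offset. A minimal sketch of the two equivalent spellings, using the test table added at the bottom of this commit:

select * from test.limit_by order by id, val limit 1, 2 by id;       -- offset 1, length 2 per id
select * from test.limit_by order by id, val limit 2 offset 1 by id; -- same thing, OFFSET spelling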

View File

@@ -5,9 +5,11 @@
namespace DB
{
LimitByBlockInputStream::LimitByBlockInputStream(const BlockInputStreamPtr & input, size_t group_size_, const Names & columns)
LimitByBlockInputStream::LimitByBlockInputStream(const BlockInputStreamPtr & input,
size_t group_length_, size_t group_offset_, const Names & columns)
: columns_names(columns)
, group_size(group_size_)
, group_length(group_length_)
, group_offset(group_offset_)
{
children.push_back(input);
}
@@ -37,7 +39,8 @@ Block LimitByBlockInputStream::readImpl()
hash.get128(key.low, key.high);
if (keys_counts[key]++ < group_size)
auto count = keys_counts[key]++;
if (count >= group_offset && count < group_length + group_offset)
{
inserted_count++;
filter[i] = 1;

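The filter above now passes a row only when its key's zero-based counter lies in [group_offset, group_offset + group_length). A sketch of the resulting behaviour on the test data added at the end of this commit (output as given in the .reference file further down):

select * from test.limit_by order by id, val limit 2, 2 by id;
-- id=1: 100 and 110 are skipped, 120 and 130 pass; id=2: only 220 passes; id=3: nothing survives the offset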
View File

@@ -18,7 +18,7 @@ namespace DB
class LimitByBlockInputStream : public IBlockInputStream
{
public:
LimitByBlockInputStream(const BlockInputStreamPtr & input, size_t group_size_, const Names & columns);
LimitByBlockInputStream(const BlockInputStreamPtr & input, size_t group_length_, size_t group_offset_, const Names & columns);
String getName() const override { return "LimitBy"; }
@@ -34,7 +34,8 @@ private:
using MapHashed = HashMap<UInt128, UInt64, UInt128TrivialHash>;
const Names columns_names;
const size_t group_size;
const size_t group_length;
const size_t group_offset;
MapHashed keys_counts;
};

View File

@@ -1562,18 +1562,18 @@ void InterpreterSelectQuery::executePreLimit(Pipeline & pipeline)
void InterpreterSelectQuery::executeLimitBy(Pipeline & pipeline)
{
auto & query = getSelectQuery();
if (!query.limitByValue() || !query.limitBy())
if (!query.limitByLength() || !query.limitBy())
return;
Names columns;
for (const auto & elem : query.limitBy()->children)
columns.emplace_back(elem->getColumnName());
UInt64 value = getLimitUIntValue(query.limitByValue(), context);
UInt64 length = getLimitUIntValue(query.limitByLength(), context);
UInt64 offset = (query.limitByOffset() ? getLimitUIntValue(query.limitByOffset(), context) : 0);
pipeline.transform([&](auto & stream)
{
stream = std::make_shared<LimitByBlockInputStream>(stream, value, columns);
stream = std::make_shared<LimitByBlockInputStream>(stream, length, offset, columns);
});
}

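When the query has no LIMIT BY offset, `offset` above is 0 and the stream behaves as before. A small sketch (this query is not in the test file; the result is stated as I read the logic):

select * from test.limit_by order by id, val limit 2 by id; -- should return the first two rows of each id: (1,100),(1,110),(2,200),(2,210),(3,300)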
View File

@@ -42,7 +42,8 @@ ASTPtr ASTSelectQuery::clone() const
CLONE(Expression::GROUP_BY);
CLONE(Expression::HAVING);
CLONE(Expression::ORDER_BY);
CLONE(Expression::LIMIT_BY_VALUE);
CLONE(Expression::LIMIT_BY_OFFSET);
CLONE(Expression::LIMIT_BY_LENGTH);
CLONE(Expression::LIMIT_BY);
CLONE(Expression::LIMIT_OFFSET);
CLONE(Expression::LIMIT_LENGTH);
@@ -124,10 +125,15 @@ void ASTSelectQuery::formatImpl(const FormatSettings & s, FormatState & state, F
: orderBy()->as<ASTExpressionList &>().formatImplMultiline(s, state, frame);
}
if (limitByValue())
if (limitByLength())
{
s.ostr << (s.hilite ? hilite_keyword : "") << s.nl_or_ws << indent_str << "LIMIT " << (s.hilite ? hilite_none : "");
limitByValue()->formatImpl(s, state, frame);
if (limitByOffset())
{
limitByOffset()->formatImpl(s, state, frame);
s.ostr << ", ";
}
limitByLength()->formatImpl(s, state, frame);
s.ostr << (s.hilite ? hilite_keyword : "") << " BY " << (s.hilite ? hilite_none : "");
s.one_line
? limitBy()->formatImpl(s, state, frame)

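Note that formatImpl always prints the offset in the comma form, so both spellings should round-trip to the same text. A sketch of what I expect:

select * from test.limit_by limit 2 offset 1 by id; -- presumably formats back as: SELECT * FROM test.limit_by LIMIT 1, 2 BY id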
View File

@@ -25,7 +25,8 @@ public:
GROUP_BY,
HAVING,
ORDER_BY,
LIMIT_BY_VALUE,
LIMIT_BY_OFFSET,
LIMIT_BY_LENGTH,
LIMIT_BY,
LIMIT_OFFSET,
LIMIT_LENGTH,
@@ -56,7 +57,8 @@ public:
const ASTPtr groupBy() const { return getExpression(Expression::GROUP_BY); }
const ASTPtr having() const { return getExpression(Expression::HAVING); }
const ASTPtr orderBy() const { return getExpression(Expression::ORDER_BY); }
const ASTPtr limitByValue() const { return getExpression(Expression::LIMIT_BY_VALUE); }
const ASTPtr limitByOffset() const { return getExpression(Expression::LIMIT_BY_OFFSET); }
const ASTPtr limitByLength() const { return getExpression(Expression::LIMIT_BY_LENGTH); }
const ASTPtr limitBy() const { return getExpression(Expression::LIMIT_BY); }
const ASTPtr limitOffset() const { return getExpression(Expression::LIMIT_OFFSET); }
const ASTPtr limitLength() const { return getExpression(Expression::LIMIT_LENGTH); }

View File

@@ -60,7 +60,8 @@ bool ParserSelectQuery::parseImpl(Pos & pos, ASTPtr & node, Expected & expected)
ASTPtr group_expression_list;
ASTPtr having_expression;
ASTPtr order_expression_list;
ASTPtr limit_by_value;
ASTPtr limit_by_length;
ASTPtr limit_by_offset;
ASTPtr limit_by_expression_list;
ASTPtr limit_offset;
ASTPtr limit_length;
@@ -180,7 +181,7 @@ bool ParserSelectQuery::parseImpl(Pos & pos, ASTPtr & node, Expected & expected)
return false;
}
/// LIMIT length | LIMIT offset, length | LIMIT count BY expr-list
/// LIMIT length | LIMIT offset, length | LIMIT count BY expr-list | LIMIT offset, length BY expr-list
if (s_limit.ignore(pos, expected))
{
if (limit_length)
@@ -197,25 +198,27 @@ bool ParserSelectQuery::parseImpl(Pos & pos, ASTPtr & node, Expected & expected)
if (!exp_elem.parse(pos, limit_length, expected))
return false;
}
else if (s_by.ignore(pos, expected))
{
limit_by_value = limit_length;
limit_length = nullptr;
if (!exp_list.parse(pos, limit_by_expression_list, expected))
return false;
}
else if (s_offset.ignore(pos, expected))
{
if (!exp_elem.parse(pos, limit_offset, expected))
return false;
}
if (s_by.ignore(pos, expected))
{
limit_by_length = limit_length;
limit_by_offset = limit_offset;
limit_length = nullptr;
limit_offset = nullptr;
if (!exp_list.parse(pos, limit_by_expression_list, expected))
return false;
}
}
/// LIMIT length | LIMIT offset, length
if (s_limit.ignore(pos, expected))
{
if (!limit_by_value || limit_length)
if (!limit_by_length || limit_length)
return false;
ParserToken s_comma(TokenType::Comma);
@@ -229,6 +232,11 @@ bool ParserSelectQuery::parseImpl(Pos & pos, ASTPtr & node, Expected & expected)
if (!exp_elem.parse(pos, limit_length, expected))
return false;
}
else if (s_offset.ignore(pos, expected))
{
if (!exp_elem.parse(pos, limit_offset, expected))
return false;
}
}
/// SETTINGS key1 = value1, key2 = value2, ...
@@ -248,7 +256,8 @@ bool ParserSelectQuery::parseImpl(Pos & pos, ASTPtr & node, Expected & expected)
select_query->setExpression(ASTSelectQuery::Expression::GROUP_BY, std::move(group_expression_list));
select_query->setExpression(ASTSelectQuery::Expression::HAVING, std::move(having_expression));
select_query->setExpression(ASTSelectQuery::Expression::ORDER_BY, std::move(order_expression_list));
select_query->setExpression(ASTSelectQuery::Expression::LIMIT_BY_VALUE, std::move(limit_by_value));
select_query->setExpression(ASTSelectQuery::Expression::LIMIT_BY_OFFSET, std::move(limit_by_offset));
select_query->setExpression(ASTSelectQuery::Expression::LIMIT_BY_LENGTH, std::move(limit_by_length));
select_query->setExpression(ASTSelectQuery::Expression::LIMIT_BY, std::move(limit_by_expression_list));
select_query->setExpression(ASTSelectQuery::Expression::LIMIT_OFFSET, std::move(limit_offset));
select_query->setExpression(ASTSelectQuery::Expression::LIMIT_LENGTH, std::move(limit_length));

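Putting the parser changes together, these are the forms the updated grammar is intended to accept, a sketch based on the comment above and the test queries below:

select * from test.limit_by limit 2 by id;                      -- LIMIT length BY expr-list
select * from test.limit_by limit 1, 2 by id;                   -- LIMIT offset, length BY expr-list
select * from test.limit_by limit 2 offset 1 by id;             -- LIMIT length OFFSET offset BY expr-list
select * from test.limit_by limit 1, 2 by id limit 3 offset 1;  -- LIMIT BY followed by a plain LIMIT/OFFSET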
View File

@@ -0,0 +1,13 @@
1 120
1 130
2 220
1 110
1 120
2 210
2 220
1 110
1 120
2 210
1 120
2 210
2 220

View File

@@ -0,0 +1,10 @@
drop table if exists test.limit_by;
create table test.limit_by(id Int, val Int) engine = Memory;
insert into test.limit_by values(1, 100), (1, 110), (1, 120), (1, 130), (2, 200), (2, 210), (2, 220), (3, 300);
select * from test.limit_by order by id, val limit 2, 2 by id;
select * from test.limit_by order by id, val limit 2 offset 1 by id;
select * from test.limit_by order by id, val limit 1, 2 by id limit 3;
select * from test.limit_by order by id, val limit 1, 2 by id limit 3 offset 1;
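For readability, this is how I map the four queries to the blocks of the .reference file above (each comment lists the expected rows in order):

-- limit 2, 2 by id                  -> (1,120), (1,130), (2,220)
-- limit 2 offset 1 by id            -> (1,110), (1,120), (2,210), (2,220)
-- limit 1, 2 by id limit 3          -> (1,110), (1,120), (2,210)
-- limit 1, 2 by id limit 3 offset 1 -> (1,120), (2,210), (2,220)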