Fix error

This commit is contained in:
Alexey Milovidov 2024-06-17 04:57:16 +02:00
parent edbd57eeb5
commit d2d8006037
2 changed files with 20 additions and 4 deletions

View File

@@ -21,6 +21,7 @@ class Tokens
{
private:
std::vector<Token> data;
size_t max_pos = 0;
Lexer lexer;
bool skip_insignificant;
@@ -35,10 +36,16 @@ public:
while (true)
{
if (index < data.size())
{
max_pos = std::max(max_pos, index);
return data[index];
}
if (!data.empty() && data.back().isEnd())
{
max_pos = data.size() - 1;
return data.back();
}
Token token = lexer.nextToken();
@@ -51,7 +58,12 @@ public:
{
if (data.empty())
return (*this)[0];
return data.back();
return data[max_pos];
}
/// Forget the maximum reached token position (`max_pos` is advanced by
/// `operator[]` as tokens are accessed). Called after a speculative
/// lookahead scan so that the recorded maximum reflects only positions
/// reached by the real parse, not the lookahead.
void reset()
{
max_pos = 0;
}
};

View File

@@ -285,6 +285,8 @@ ASTPtr tryParseQuery(
return nullptr;
}
Expected expected;
/** A shortcut - if Lexer found invalid tokens, fail early without full parsing.
* But there are certain cases when invalid tokens are permitted:
* 1. INSERT queries can have arbitrary data after the FORMAT clause, that is parsed by a different parser.
@@ -293,9 +295,9 @@ ASTPtr tryParseQuery(
*
* This shortcut is needed to avoid complex backtracking in case of obviously erroneous queries.
*/
IParser::Pos lookahead = token_iterator;
if (!ParserKeyword(Keyword::INSERT_INTO).ignore(lookahead))
if (!ParserKeyword(Keyword::INSERT_INTO).check(token_iterator, expected))
{
IParser::Pos lookahead(token_iterator);
while (lookahead->type != TokenType::Semicolon && lookahead->type != TokenType::EndOfStream)
{
if (lookahead->isError())
@@ -306,9 +308,11 @@
++lookahead;
}
/// We should not spoil the info about maximum parsed position in the original iterator.
tokens.reset();
}
Expected expected;
ASTPtr res;
const bool parse_res = parser.parse(token_iterator, res, expected);
const auto last_token = token_iterator.max();