Fix a bug in the processing of long HTTP INSERT queries

This commit is contained in:
alesapin 2019-02-20 12:22:13 +03:00
parent 1b0ae80a49
commit ef7beb1596
3 changed files with 19 additions and 1 deletions

View File

@ -462,12 +462,16 @@ void executeQuery(
size_t max_query_size = context.getSettingsRef().max_query_size;
bool may_have_tail;
if (istr.buffer().end() - istr.position() > static_cast<ssize_t>(max_query_size))
{
/// If remaining buffer space in 'istr' is enough to parse query up to 'max_query_size' bytes, then parse inplace.
begin = istr.position();
end = istr.buffer().end();
istr.position() += end - begin;
/// Actually we don't know whether the query has additional data or not.
/// But we can't check istr.eof(), because the begin and end pointers will become invalid
may_have_tail = true;
}
else
{
@ -479,12 +483,14 @@ void executeQuery(
begin = parse_buf.data();
end = begin + parse_buf.size();
/// Can check stream for eof, because we have copied data
may_have_tail = !istr.eof();
}
ASTPtr ast;
BlockIO streams;
std::tie(ast, streams) = executeQueryImpl(begin, end, context, false, QueryProcessingStage::Complete, !istr.eof());
std::tie(ast, streams) = executeQueryImpl(begin, end, context, false, QueryProcessingStage::Complete, may_have_tail);
try
{

View File

@ -0,0 +1 @@
1000000

View File

@ -0,0 +1,11 @@
#!/usr/bin/env bash
# Regression test: a long HTTP INSERT query (~1M value tuples) must be fully
# processed; verifies the row count after the insert matches the expected total.
set -e

# Resolve the directory containing this script and load the shared test config
# (provides CLICKHOUSE_CURL and CLICKHOUSE_URL). Quoted to survive paths with
# spaces (ShellCheck SC2086).
CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
. "$CURDIR/../shell_config.sh"

# Start from a clean slate.
echo 'DROP TABLE IF EXISTS test.table_for_insert' | ${CLICKHOUSE_CURL} -sSg "${CLICKHOUSE_URL}" -d @-
echo 'CREATE TABLE test.table_for_insert (a UInt8, b UInt8) ENGINE = Memory' | ${CLICKHOUSE_CURL} -sSg "${CLICKHOUSE_URL}" -d @-

# Build an INSERT with 1,000,000 tuples: printf emits 1,000,000 spaces
# (%*s pads an empty string to the given width), sed turns each space into
# a "(1, 2)" tuple. The resulting query body far exceeds max_query_size.
echo "INSERT INTO test.table_for_insert VALUES `printf '%*s' "1000000" | sed 's/ /(1, 2)/g'`" | ${CLICKHOUSE_CURL} -sSg "${CLICKHOUSE_URL}" -d @-

# Expected output: 1000000 (matches the .reference fixture).
echo 'SELECT count(*) FROM test.table_for_insert' | ${CLICKHOUSE_CURL} -sSg "${CLICKHOUSE_URL}" -d @-

# Clean up.
echo 'DROP TABLE IF EXISTS test.table_for_insert' | ${CLICKHOUSE_CURL} -sSg "${CLICKHOUSE_URL}" -d @-