Fix test, add more comments. [#CLICKHOUSE-2070]

This commit is contained in:
Vitaliy Lyudvichenko 2017-03-20 19:39:28 +03:00 committed by alexey-milovidov
parent 44c9bad289
commit 0290203f6d
5 changed files with 57 additions and 34 deletions

View File

@ -11,7 +11,19 @@ namespace ErrorCodes
extern const int CURRENT_WRITE_BUFFER_IS_EXHAUSTED;
}
/* The buffer is similar to ConcatReadBuffer, but writes data instead of reading it.
 *
 * It has a sequence of WriteBuffers: [prepared_sources, lazy_sources]
 * (lazy_sources contains not the pointers themselves, but their delayed constructors)
 *
 * Firstly, CascadeWriteBuffer redirects data to the first buffer of the sequence.
 * If the current WriteBuffer cannot receive data anymore, it throws the special exception CURRENT_WRITE_BUFFER_IS_EXHAUSTED in the nextImpl() body,
 * and CascadeWriteBuffer prepares the next buffer and continues redirecting data to it.
 * If there are no more buffers, CascadeWriteBuffer throws an exception.
 *
 * NOTE: If you use one of the underlying WriteBuffers outside, you need to sync its position() with CascadeWriteBuffer's position().
 * The sync is performed in nextImpl(), getResultBuffers() and the destructor.
 */
class CascadeWriteBuffer : public WriteBuffer
{
public:

View File

@ -61,6 +61,8 @@ void CascadeWriteBuffer::getResultBuffers(WriteBufferPtrs & res)
curr_buffer = nullptr;
curr_buffer_num = num_sources = 0;
prepared_sources.clear();
lazy_sources.clear();
}

View File

@ -149,7 +149,7 @@ void HTTPHandler::pushDelayedResults(Output & used_output)
}
ConcatReadBuffer concat_read_buffer(read_buffers_raw_ptr);
copyData(concat_read_buffer, *used_output.out);
copyData(concat_read_buffer, *used_output.out_maybe_compressed);
}
@ -446,7 +446,7 @@ void HTTPHandler::processQuery(
if (used_output.hasDelayed())
{
/// TODO: set Content-Length if possible (?)
/// TODO: set Content-Length if possible
pushDelayedResults(used_output);
}
@ -490,6 +490,10 @@ void HTTPHandler::trySendExceptionToClient(const std::string & s, int exception_
}
else if (used_output.out_maybe_compressed)
{
/// Destroy CascadeBuffer to actualize buffers' positions and reset extra references
if (used_output.hasDelayed())
used_output.out_maybe_delayed_and_compressed.reset();
/// Send the error message into already used (and possibly compressed) stream.
/// Note that the error message will possibly be sent after some data.
/// Also HTTP code 200 could have already been sent.
@ -505,7 +509,9 @@ void HTTPHandler::trySendExceptionToClient(const std::string & s, int exception_
writeString(s, *used_output.out_maybe_compressed);
writeChar('\n', *used_output.out_maybe_compressed);
used_output.out_maybe_compressed->next();
used_output.out->next();
used_output.out->finalize();
}
}

View File

@ -1,12 +1,11 @@
#!/bin/bash
set -e
max_block_size=10
max_block_size=100
URL='http://localhost:8123/'
function query {
echo "SELECT toUInt8(intHash64(number)) FROM system.numbers LIMIT $1 FORMAT RowBinary"
#echo "SELECT toUInt8(number) FROM system.numbers LIMIT $1 FORMAT RowBinary"
}
function ch_url() {
@ -23,47 +22,49 @@ function check_only_exception() {
#(echo "$res")
#(echo "$res" | wc -l)
#(echo "$res" | grep -c "^$exception_pattern")
[[ `echo "$res" | wc -l` -eq 1 ]] && echo OK || echo FAIL
[[ $(echo "$res" | grep -c "^$exception_pattern") -eq 1 ]] && echo OK || echo FAIL
[[ `echo "$res" | wc -l` -eq 1 ]] || echo FAIL
[[ $(echo "$res" | grep -c "^$exception_pattern") -eq 1 ]] || echo FAIL
}
function check_last_line_exception() {
local res=`ch_url "$1" "$2"`
echo "$res" > res
#echo "$res" > res
#echo "$res" | wc -c
#echo "$res" | tail -n -2
[[ $(echo "$res" | tail -n -1 | grep -c "$exception_pattern") -eq 1 ]] && echo OK || echo FAIL
[[ $(echo "$res" | head -n -1 | grep -c "$exception_pattern") -eq 0 ]] && echo OK || echo FAIL
[[ $(echo "$res" | tail -n -1 | grep -c "$exception_pattern") -eq 1 ]] || echo FAIL
[[ $(echo "$res" | head -n -1 | grep -c "$exception_pattern") -eq 0 ]] || echo FAIL
}
function check_exception_handling() {
check_only_exception "max_result_bytes=1000" 1001
check_only_exception "max_result_bytes=1000&wait_end_of_query=1" 1001
echo
check_only_exception "max_result_bytes=1048576&buffer_size=1048576&wait_end_of_query=0" 1048577
check_only_exception "max_result_bytes=1048576&buffer_size=1048576&wait_end_of_query=1" 1048577
echo
check_only_exception "max_result_bytes=1500000&buffer_size=2500000&wait_end_of_query=0" 1500001
check_only_exception "max_result_bytes=1500000&buffer_size=1500000&wait_end_of_query=1" 1500001
echo
check_only_exception "max_result_bytes=4000000&buffer_size=2000000&wait_end_of_query=1" 5000000
check_only_exception "max_result_bytes=4000000&wait_end_of_query=1" 5000000
check_last_line_exception "max_result_bytes=4000000&buffer_size=2000000&wait_end_of_query=0" 5000000
check_only_exception "max_result_bytes=1000" 1001
check_only_exception "max_result_bytes=1000&wait_end_of_query=1" 1001
check_only_exception "max_result_bytes=1048576&buffer_size=1048576&wait_end_of_query=0" 1048577
check_only_exception "max_result_bytes=1048576&buffer_size=1048576&wait_end_of_query=1" 1048577
check_only_exception "max_result_bytes=1500000&buffer_size=2500000&wait_end_of_query=0" 1500001
check_only_exception "max_result_bytes=1500000&buffer_size=1500000&wait_end_of_query=1" 1500001
check_only_exception "max_result_bytes=4000000&buffer_size=2000000&wait_end_of_query=1" 5000000
check_only_exception "max_result_bytes=4000000&wait_end_of_query=1" 5000000
check_last_line_exception "max_result_bytes=4000000&buffer_size=2000000&wait_end_of_query=0" 5000000
}
#check_exception_handling
check_exception_handling
# Tune setting to speed up combinatorial test
max_block_size=500000
corner_sizes="1048576 `seq 500000 1000000 3500000`"
# Check HTTP results with clickhouse-client in normal case
function cmp_cli_and_http() {
clickhouse-client -q "`query $1`" > res1
ch_url "buffer_size=$2&wait_end_of_query=0" "$1" > res2
ch_url "buffer_size=$2&wait_end_of_query=1" "$1" > res3
cmp res1 res2
cmp res1 res3
cmp res1 res2 && cmp res1 res3 || echo FAIL
rm -rf res1 res2 res3
}
@ -78,7 +79,9 @@ function check_cli_and_http() {
check_cli_and_http
# Check HTTP internal compression in normal case (clickhouse-compressor required)
# Check HTTP internal compression in normal case
# Skip if clickhouse-compressor not installed
function cmp_http_compression() {
clickhouse-client -q "`query $1`" > res0
@ -101,4 +104,7 @@ function check_http_compression() {
}
has_compressor=$(command -v clickhouse-compressor &>/dev/null && echo 1)
[[ has_compressor ]] && check_http_compression || true
if [[ $has_compressor -eq 1 ]]; then
check_http_compression
fi

View File

@ -2,6 +2,7 @@
#include <boost/program_options.hpp>
#include <DB/Common/Exception.h>
#include <DB/IO/WriteBufferFromFileDescriptor.h>
#include <DB/IO/ReadBufferFromFileDescriptor.h>
#include <DB/IO/CompressedWriteBuffer.h>
@ -116,14 +117,10 @@ int main(int argc, char ** argv)
DB::copyData(rb, to);
}
}
catch (const DB::Exception & e)
catch (...)
{
std::cerr << e.what() << ", " << e.message() << std::endl
<< std::endl
<< "Stack trace:" << std::endl
<< e.getStackTrace().toString()
<< std::endl;
throw;
std::cerr << DB::getCurrentExceptionMessage(true);
return DB::getCurrentExceptionCode();
}
return 0;