ClickHouse/dbms/src/DataStreams/BlockStreamProfileInfo.cpp

#include <DB/IO/ReadHelpers.h>
#include <DB/IO/WriteHelpers.h>
#include <DB/Core/Block.h>
#include <DB/DataStreams/BlockStreamProfileInfo.h>

namespace DB
{
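
/// Note: the field order here must stay in sync with write() below.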
void BlockStreamProfileInfo::read(ReadBuffer & in)
{
    readVarUInt(rows, in);
    readVarUInt(blocks, in);
    readVarUInt(bytes, in);
    readBinary(applied_limit, in);
    readVarUInt(rows_before_limit, in);
    readBinary(calculated_rows_before_limit, in);
}
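
/// Serialize in the same field order that read() expects. Calling hasAppliedLimit() and
/// getRowsBeforeLimit() forces the lazy calculation before the values are written.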
void BlockStreamProfileInfo::write(WriteBuffer & out) const
{
    writeVarUInt(rows, out);
    writeVarUInt(blocks, out);
    writeVarUInt(bytes, out);
    writeBinary(hasAppliedLimit(), out);
    writeVarUInt(getRowsBeforeLimit(), out);
    writeBinary(calculated_rows_before_limit, out);
}
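
/// Copy counters from another stream's info. If skip_block_size_info is set,
/// the rows/blocks/bytes counters are left untouched and only the limit-related
/// state is taken from rhs.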
void BlockStreamProfileInfo::setFrom(const BlockStreamProfileInfo & rhs, bool skip_block_size_info)
{
    if (!skip_block_size_info)
    {
        rows = rhs.rows;
        blocks = rhs.blocks;
        bytes = rhs.bytes;
    }

    applied_limit = rhs.applied_limit;
    rows_before_limit = rhs.rows_before_limit;
    calculated_rows_before_limit = rhs.calculated_rows_before_limit;
}
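
/// rows_before_limit is calculated lazily, on first access.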
size_t BlockStreamProfileInfo::getRowsBeforeLimit() const
{
    if (!calculated_rows_before_limit)
        calculateRowsBeforeLimit();
    return rows_before_limit;
}
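
/// Calculated lazily as well, by the same calculateRowsBeforeLimit() pass.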
bool BlockStreamProfileInfo::hasAppliedLimit() const
{
    if (!calculated_rows_before_limit)
        calculateRowsBeforeLimit();
    return applied_limit;
}
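
/// Account for one more block that has passed through the stream.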
void BlockStreamProfileInfo::update(Block & block)
{
    ++blocks;
    rows += block.rows();
    bytes += block.bytes();
}
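
/// Depth-first traversal of the nested stream infos. Recursion stops at the first
/// matching stream on each branch, so only the topmost occurrences are collected.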
void BlockStreamProfileInfo::collectInfosForStreamsWithName(const char * name, BlockStreamProfileInfos & res) const
{
    if (stream_name == name)
    {
        res.push_back(this);
        return;
    }

    for (const auto & nested_info : nested_infos)
        nested_info->collectInfosForStreamsWithName(name, res);
}
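
/// If there is a Limit in the pipeline, rows_before_limit is the sum of rows read
/// by the streams directly below PartialSorting (if any) or below Limit.
/// Otherwise it is taken from Remote streams, i.e. from values received from remote servers.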
void BlockStreamProfileInfo::calculateRowsBeforeLimit() const
{
    calculated_rows_before_limit = true;

    /// Is there a Limit?
    BlockStreamProfileInfos limits;
    collectInfosForStreamsWithName("Limit", limits);

    if (!limits.empty())
    {
        applied_limit = true;

        /** Take the number of rows read below `PartialSorting`, if any, or below `Limit`.
          * This is necessary, because sorting can return only part of the rows.
          */
        BlockStreamProfileInfos partial_sortings;
        collectInfosForStreamsWithName("PartialSorting", partial_sortings);

        BlockStreamProfileInfos & limits_or_sortings = partial_sortings.empty() ? limits : partial_sortings;

        for (const auto & info_limit_or_sort : limits_or_sortings)
            for (const auto & nested_info : info_limit_or_sort->nested_infos)
                rows_before_limit += nested_info->rows;
    }
    else
    {
        /// Then the data about `rows_before_limit` can be in `RemoteBlockInputStream` (come from a remote server).
        BlockStreamProfileInfos remotes;
        collectInfosForStreamsWithName("Remote", remotes);

        if (remotes.empty())
            return;

        for (const auto & info : remotes)
        {
            if (info->applied_limit)
            {
                applied_limit = true;
                rows_before_limit += info->rows_before_limit;
            }
        }
    }
}

}