Merge pull request #8463 from ClickHouse/bounding-ratio-performance-degradation-fix

Fixed performance degradation of "boundingRatio" aggregate function
commit 70f45d6a54
Author: alexey-milovidov (committed by GitHub)
Date:   2019-12-29 22:40:21 +03:00
3 changed files with 7 additions and 10 deletions


@@ -129,9 +129,9 @@ public:
     void add(AggregateDataPtr place, const IColumn ** columns, const size_t row_num, Arena *) const override
     {
-        /// TODO Inefficient.
-        const auto x = applyVisitor(FieldVisitorConvertToNumber<Float64>(), (*columns[0])[row_num]);
-        const auto y = applyVisitor(FieldVisitorConvertToNumber<Float64>(), (*columns[1])[row_num]);
+        /// NOTE Slightly inefficient.
+        const auto x = columns[0]->getFloat64(row_num);
+        const auto y = columns[1]->getFloat64(row_num);
         data(place).add(x, y);
     }
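
The speedup comes from avoiding per-row Field materialization: the old code boxed each value into a type-erased Field through operator[] and then ran FieldVisitorConvertToNumber over it, while the new code reads the number straight out of the column with getFloat64. A minimal sketch of the difference, using simplified stand-in types rather than ClickHouse's real Field and IColumn:

#include <cstddef>
#include <cstdint>
#include <variant>
#include <vector>

/// Simplified stand-in; the real Field can also hold strings, arrays, etc.,
/// making a per-row construction plus visitor dispatch noticeably costly.
using Field = std::variant<int64_t, uint64_t, double>;

struct IColumn
{
    virtual ~IColumn() = default;
    virtual Field operator[](size_t row) const = 0;   /// boxes the value
    virtual double getFloat64(size_t row) const = 0;  /// reads it directly
};

struct ColumnFloat64 final : IColumn
{
    std::vector<double> data;
    Field operator[](size_t row) const override { return data[row]; }
    double getFloat64(size_t row) const override { return data[row]; }
};

/// Old path: temporary Field plus a visitor dispatch for every row.
double get_slow(const IColumn & col, size_t row)
{
    return std::visit([](auto v) { return static_cast<double>(v); }, col[row]);
}

/// New path: one virtual call, no temporary, no dispatch.
double get_fast(const IColumn & col, size_t row)
{
    return col.getFloat64(row);
}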


@@ -131,9 +131,7 @@ public:
     /** Contains a loop with calls to "add" function. You can collect arguments into array "places"
       * and do a single call to "addBatch" for devirtualization and inlining.
       */
-    virtual void
-    addBatch(size_t batch_size, AggregateDataPtr * places, size_t place_offset, const IColumn ** columns, Arena * arena)
-    const = 0;
+    virtual void addBatch(size_t batch_size, AggregateDataPtr * places, size_t place_offset, const IColumn ** columns, Arena * arena) const = 0;

     /** The same for single place.
       */
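
The devirtualization described in the comment is typically achieved with a CRTP helper that implements addBatch once, so the per-row add call is statically bound and can be inlined; one virtual call is paid per batch instead of one per row. A hedged sketch of that pattern, reusing the interface declared above; the helper name is illustrative and the assumption that AggregateDataPtr is a raw byte pointer is mine, not taken from this diff:

/// Sketch only: assumes the IAggregateFunction interface shown above.
template <typename Derived>
class IAggregateFunctionHelperSketch : public IAggregateFunction
{
public:
    void addBatch(size_t batch_size, AggregateDataPtr * places, size_t place_offset,
                  const IColumn ** columns, Arena * arena) const override
    {
        /// The inner call resolves statically to Derived::add, so the
        /// compiler can inline the per-row work inside this loop.
        for (size_t i = 0; i < batch_size; ++i)
            static_cast<const Derived &>(*this).add(places[i] + place_offset, columns, i, arena);
    }
};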
@@ -144,9 +142,8 @@ public:
      * -Array combinator. It might also be used generally to break data dependency when array
      * "places" contains a large number of same values consecutively.
      */
-    virtual void
-    addBatchArray(size_t batch_size, AggregateDataPtr * places, size_t place_offset, const IColumn ** columns, const UInt64 * offsets, Arena * arena)
-    const = 0;
+    virtual void addBatchArray(
+        size_t batch_size, AggregateDataPtr * places, size_t place_offset, const IColumn ** columns, const UInt64 * offsets, Arena * arena) const = 0;

     const DataTypes & getArgumentTypes() const { return argument_types; }
     const Array & getParameters() const { return parameters; }
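
For the -Array combinator, the offsets array marks where each row's nested array ends, so a batch implementation would feed every consecutive run of nested rows into the same aggregation state. A sketch of how addBatchArray could look inside the same hypothetical CRTP helper as above:

    void addBatchArray(size_t batch_size, AggregateDataPtr * places, size_t place_offset,
                       const IColumn ** columns, const UInt64 * offsets, Arena * arena) const override
    {
        size_t current_offset = 0;
        for (size_t i = 0; i < batch_size; ++i)
        {
            /// offsets[i] is the end position of the i-th array, so rows
            /// [current_offset, offsets[i]) all update places[i].
            size_t next_offset = offsets[i];
            for (size_t j = current_offset; j < next_offset; ++j)
                static_cast<const Derived &>(*this).add(places[i] + place_offset, columns, j, arena);
            current_offset = next_offset;
        }
    }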


@@ -316,7 +316,7 @@ BlockInputStreamPtr MongoDBDictionarySource::loadKeys(const Columns & key_columns
             case AttributeUnderlyingType::utFloat32:
             case AttributeUnderlyingType::utFloat64:
-                key.add(attr.second.name, applyVisitor(FieldVisitorConvertToNumber<Float64>(), (*key_columns[attr.first])[row_idx]));
+                key.add(attr.second.name, key_columns[attr.first]->getFloat64(row_idx));
                 break;
             case AttributeUnderlyingType::utString: