Removed obsolete code [#CLICKHOUSE-3270].

This commit is contained in:
Alexey Milovidov 2017-09-01 21:21:01 +03:00
parent 1a8e22c37f
commit 2af8747164
23 changed files with 37 additions and 36 deletions

View File

@@ -176,7 +176,7 @@ size_t ColumnAggregateFunction::byteSize() const
 size_t res = getData().size() * sizeof(getData()[0]);
 for (const auto & arena : arenas)
-res += arena.get()->size();
+res += arena->size();
 return res;
 }
@@ -188,7 +188,7 @@ size_t ColumnAggregateFunction::allocatedBytes() const
 size_t res = getData().allocated_bytes();
 for (const auto & arena : arenas)
-res += arena.get()->size();
+res += arena->size();
 return res;
 }

View File

@@ -328,7 +328,7 @@ ColumnPtr ColumnArray::convertToFullColumnIfConst() const
 else
 new_data = data;
-if (auto full_column = offsets.get()->convertToFullColumnIfConst())
+if (auto full_column = offsets->convertToFullColumnIfConst())
 new_offsets = full_column;
 else
 new_offsets = offsets;

View File

@@ -129,7 +129,7 @@ public:
 virtual ColumnPtr cut(size_t start, size_t length) const
 {
 ColumnPtr res = cloneEmpty();
-res.get()->insertRangeFrom(*this, start, length);
+res->insertRangeFrom(*this, start, length);
 return res;
 }

View File

@@ -2,6 +2,7 @@
 #include <type_traits>
 #include <typeinfo>
 #include <typeindex>
+#include <string>
 #include <Common/Exception.h>

View File

@@ -19,7 +19,7 @@ bool BinaryRowInputStream::read(Block & block)
 size_t columns = block.columns();
 for (size_t i = 0; i < columns; ++i)
-block.getByPosition(i).type.get()->deserializeBinary(*block.getByPosition(i).column.get(), istr);
+block.getByPosition(i).type->deserializeBinary(*block.getByPosition(i).column.get(), istr);
 return true;
 }

View File

@@ -28,7 +28,7 @@ void BlockOutputStreamFromRowOutputStream::write(const Block & block)
 row_output->writeFieldDelimiter();
 auto & col = block.getByPosition(j);
-row_output->writeField(*col.column.get(), *col.type.get(), i);
+row_output->writeField(*col.column, *col.type, i);
 }
 row_output->writeRowEndDelimiter();

View File

@@ -125,7 +125,7 @@ bool CSVRowInputStream::read(Block & block)
 for (size_t i = 0; i < size; ++i)
 {
 skipWhitespacesAndTabs(istr);
-data_types[i].get()->deserializeTextCSV(*block.getByPosition(i).column.get(), istr, delimiter);
+data_types[i]->deserializeTextCSV(*block.getByPosition(i).column.get(), istr, delimiter);
 skipWhitespacesAndTabs(istr);
 skipDelimiter(istr, delimiter, i + 1 == size);

View File

@@ -17,7 +17,7 @@ void IRowOutputStream::write(const Block & block, size_t row_num)
 writeFieldDelimiter();
 auto & col = block.getByPosition(i);
-writeField(*col.column.get(), *col.type.get(), row_num);
+writeField(*col.column, *col.type, row_num);
 }
 writeRowEndDelimiter();

View File

@@ -120,7 +120,7 @@ bool JSONEachRowRowInputStream::read(Block & block)
 read_columns[index] = true;
 auto & col = block.getByPosition(index);
-col.type.get()->deserializeTextJSON(*col.column.get(), istr);
+col.type->deserializeTextJSON(*col.column, istr);
 }
 skipWhitespaceIfAny(istr);
@@ -130,7 +130,7 @@ bool JSONEachRowRowInputStream::read(Block & block)
 /// Fill non-visited columns with the default values.
 for (size_t i = 0; i < columns; ++i)
 if (!read_columns[i])
-block.getByPosition(i).column.get()->insertDefault();
+block.getByPosition(i).column->insertDefault();
 return true;
 }

View File

@@ -33,7 +33,7 @@ void ODBCDriverBlockOutputStream::write(const Block & block)
 {
 WriteBufferFromString text_out(text_value);
-col.type->serializeText(*col.column.get(), i, text_out);
+col.type->serializeText(*col.column, i, text_out);
 }
 writeStringBinary(text_value, out);

View File

@@ -136,7 +136,7 @@ bool TSKVRowInputStream::read(Block & block)
 read_columns[index] = true;
 auto & col = block.getByPosition(index);
-col.type.get()->deserializeTextEscaped(*col.column.get(), istr);
+col.type->deserializeTextEscaped(*col.column, istr);
 }
 }
 else
@@ -177,7 +177,7 @@ bool TSKVRowInputStream::read(Block & block)
 /// Fill in the not met columns with default values.
 for (size_t i = 0; i < columns; ++i)
 if (!read_columns[i])
-block.getByPosition(i).column.get()->insertDefault();
+block.getByPosition(i).column->insertDefault();
 return true;
 }

View File

@@ -23,7 +23,7 @@ void TabSeparatedBlockOutputStream::write(const Block & block)
 {
 if (j != 0)
 ostr.write('\t');
-col.type->serializeTextEscaped(*col.column.get(), j, ostr);
+col.type->serializeTextEscaped(*col.column, j, ostr);
 }
 ostr.write('\n');
 }

View File

@@ -84,7 +84,7 @@ bool TabSeparatedRowInputStream::read(Block & block)
 for (size_t i = 0; i < size; ++i)
 {
-data_types[i].get()->deserializeTextEscaped(*block.getByPosition(i).column.get(), istr);
+data_types[i]->deserializeTextEscaped(*block.getByPosition(i).column.get(), istr);
 /// skip separators
 if (i + 1 == size)

View File

@@ -63,7 +63,7 @@ bool ValuesRowInputStream::read(Block & block)
 bool rollback_on_exception = false;
 try
 {
-col.type.get()->deserializeTextQuoted(*col.column.get(), istr);
+col.type->deserializeTextQuoted(*col.column, istr);
 rollback_on_exception = true;
 skipWhitespaceIfAny(istr);
@@ -94,7 +94,7 @@ bool ValuesRowInputStream::read(Block & block)
 throw;
 if (rollback_on_exception)
-col.column.get()->popBack(1);
+col.column->popBack(1);
 IDataType & type = *block.safeGetByPosition(i).type;

View File

@@ -152,7 +152,7 @@ void VerticalRowOutputStream::writeSpecialRow(const Block & block, size_t row_nu
 writeFieldDelimiter();
 auto & col = block.getByPosition(i);
-writeField(*col.column.get(), *col.type.get(), row_num);
+writeField(*col.column, *col.type, row_num);
 }
 }

View File

@@ -70,7 +70,7 @@ void DataTypeAggregateFunction::deserializeBinary(Field & field, ReadBuffer & is
 void DataTypeAggregateFunction::serializeBinary(const IColumn & column, size_t row_num, WriteBuffer & ostr) const
 {
-function.get()->serialize(static_cast<const ColumnAggregateFunction &>(column).getData()[row_num], ostr);
+function->serialize(static_cast<const ColumnAggregateFunction &>(column).getData()[row_num], ostr);
 }
 void DataTypeAggregateFunction::deserializeBinary(IColumn & column, ReadBuffer & istr) const
@@ -147,7 +147,7 @@ void DataTypeAggregateFunction::deserializeBinaryBulk(IColumn & column, ReadBuff
 static String serializeToString(const AggregateFunctionPtr & function, const IColumn & column, size_t row_num)
 {
 WriteBufferFromOwnString buffer;
-function.get()->serialize(static_cast<const ColumnAggregateFunction &>(column).getData()[row_num], buffer);
+function->serialize(static_cast<const ColumnAggregateFunction &>(column).getData()[row_num], buffer);
 return buffer.str();
 }

View File

@@ -809,7 +809,7 @@ void MergeTreeData::createConvertExpression(const DataPartPtr & part, const Name
 {
 if (!new_types.count(column.name))
 {
-bool is_nullable = column.type.get()->isNullable();
+bool is_nullable = column.type->isNullable();
 if (!part || part->hasColumnFiles(column.name))
 {
@@ -1049,7 +1049,7 @@ MergeTreeData::AlterDataPartTransactionPtr MergeTreeData::alterDataPart(
 for (size_t i = 0, size = part->size; i < size; ++i)
 for (size_t j = 0; j < new_key_size; ++j)
-new_primary_key_sample.getByPosition(j).type.get()->serializeBinary(*new_index[j].get(), i, index_stream);
+new_primary_key_sample.getByPosition(j).type->serializeBinary(*new_index[j].get(), i, index_stream);
 transaction->rename_map["primary.idx.tmp"] = "primary.idx";

View File

@@ -630,8 +630,8 @@ void MergeTreeDataPart::loadIndex()
 for (size_t i = 0; i < key_size; ++i)
 {
-index[i] = storage.primary_key_data_types[i].get()->createColumn();
-index[i].get()->reserve(size);
+index[i] = storage.primary_key_data_types[i]->createColumn();
+index[i]->reserve(size);
 }
 String index_path = getFullPath() + "primary.idx";
@@ -640,12 +640,12 @@ void MergeTreeDataPart::loadIndex()
 for (size_t i = 0; i < size; ++i)
 for (size_t j = 0; j < key_size; ++j)
-storage.primary_key_data_types[j].get()->deserializeBinary(*index[j].get(), index_file);
+storage.primary_key_data_types[j]->deserializeBinary(*index[j].get(), index_file);
 for (size_t i = 0; i < key_size; ++i)
-if (index[i].get()->size() != size)
+if (index[i]->size() != size)
 throw Exception("Cannot read all data from index file " + index_path
-+ "(expected size: " + toString(size) + ", read: " + toString(index[i].get()->size()) + ")",
++ "(expected size: " + toString(size) + ", read: " + toString(index[i]->size()) + ")",
 ErrorCodes::CANNOT_READ_ALL_DATA);
 if (!index_file.eof())

View File

@@ -854,7 +854,7 @@ MarkRanges MergeTreeDataSelectExecutor::markRangesFromPKRange(
 MarkRanges res;
 size_t used_key_size = key_condition.getMaxKeyColumn() + 1;
-size_t marks_count = index.at(0).get()->size();
+size_t marks_count = index.at(0)->size();
 /// If index is not used.
 if (key_condition.alwaysUnknownOrTrue())

View File

@@ -332,7 +332,7 @@ void MergeTreePartChecker::checkDataPart(
 Columns tmp_columns(key_size);
 for (size_t j = 0; j < key_size; ++j)
-tmp_columns[j] = primary_key_data_types[j].get()->createColumn();
+tmp_columns[j] = primary_key_data_types[j]->createColumn();
 while (!hashing_buf.eof())
 {
@@ -341,7 +341,7 @@ void MergeTreePartChecker::checkDataPart(
 ++marks_in_primary_key;
 for (size_t j = 0; j < key_size; ++j)
-primary_key_data_types[j].get()->deserializeBinary(*tmp_columns[j].get(), hashing_buf);
+primary_key_data_types[j]->deserializeBinary(*tmp_columns[j].get(), hashing_buf);
 }
 }
 else

View File

@@ -110,7 +110,7 @@ size_t MergeTreeReader::readRows(size_t from_mark, bool continue_reading, size_t
 const IDataType * observed_type;
 bool is_nullable;
-if (column.type.get()->isNullable())
+if (column.type->isNullable())
 {
 const DataTypeNullable & nullable_type = static_cast<const DataTypeNullable &>(*column.type);
 observed_type = nullable_type.getNestedType().get();

View File

@@ -500,7 +500,7 @@ void MergedBlockOutputStream::writeImpl(const Block & block, const IColumn::Perm
 {
 index_columns.resize(sort_description.size());
 for (size_t i = 0, size = sort_description.size(); i < size; ++i)
-index_columns[i] = primary_columns[i].column.get()->cloneEmpty();
+index_columns[i] = primary_columns[i].column->cloneEmpty();
 }
 /// Now write the data.
@@ -545,8 +545,8 @@ void MergedBlockOutputStream::writeImpl(const Block & block, const IColumn::Perm
 for (size_t j = 0, size = primary_columns.size(); j < size; ++j)
 {
 const IColumn & primary_column = *primary_columns[j].column.get();
-index_columns[j].get()->insertFrom(primary_column, i);
-primary_columns[j].type.get()->serializeBinary(primary_column, i, *index_stream);
+index_columns[j]->insertFrom(primary_column, i);
+primary_columns[j].type->serializeBinary(primary_column, i, *index_stream);
 }
 }

View File

@@ -28,8 +28,8 @@ private:
 {
 bool operator()(const LogEntryPtr & lhs, const LogEntryPtr & rhs) const
 {
-return std::forward_as_tuple(lhs.get()->create_time, lhs.get())
-< std::forward_as_tuple(rhs.get()->create_time, rhs.get());
+return std::forward_as_tuple(lhs->create_time, lhs.get())
+< std::forward_as_tuple(rhs->create_time, rhs.get());
 }
 };