#include "AvroRowOutputFormat.h"
#if USE_AVRO

#include <Core/Defines.h>
#include <Core/Field.h>

#include <IO/Operators.h>
#include <IO/WriteBuffer.h>
#include <IO/WriteHelpers.h>

#include <Formats/verbosePrintString.h>
#include <Formats/FormatFactory.h>

#include <DataTypes/DataTypeArray.h>
#include <DataTypes/DataTypeDate.h>
#include <DataTypes/DataTypeDateTime.h>
#include <DataTypes/DataTypeDateTime64.h>
#include <DataTypes/DataTypeEnum.h>
#include <DataTypes/DataTypeLowCardinality.h>
#include <DataTypes/DataTypeNullable.h>

#include <Columns/ColumnArray.h>
#include <Columns/ColumnFixedString.h>
#include <Columns/ColumnLowCardinality.h>
#include <Columns/ColumnNullable.h>
#include <Columns/ColumnString.h>
#include <Columns/ColumnsNumber.h>

#include <avro/Compiler.hh>
#include <avro/DataFile.hh>
#include <avro/Decoder.hh>
#include <avro/Encoder.hh>
#include <avro/Generic.hh>
#include <avro/GenericDatum.hh>
#include <avro/Node.hh>
#include <avro/NodeConcepts.hh>
#include <avro/NodeImpl.hh>
#include <avro/Reader.hh>
#include <avro/Schema.hh>
#include <avro/Specific.hh>
#include <avro/ValidSchema.hh>
#include <avro/Writer.hh>

namespace DB
{
namespace ErrorCodes
{
    extern const int ILLEGAL_COLUMN;
    extern const int BAD_ARGUMENTS;
}

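/// Bridges ClickHouse's WriteBuffer to avro::OutputStream, so avro::DataFileWriterBase
/// can encode directly into the format's output buffer without an intermediate copy.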
class OutputStreamWriteBufferAdapter : public avro::OutputStream
{
public:
    explicit OutputStreamWriteBufferAdapter(WriteBuffer & out_) : out(out_) {}

    /// Hands Avro the remaining space of the current working buffer and advances the position;
    /// bytes Avro did not actually use are returned through backup().
    virtual bool next(uint8_t ** data, size_t * len) override
    {
        out.nextIfAtEnd();
        *data = reinterpret_cast<uint8_t *>(out.position());
        *len = out.available();
        out.position() += out.available();

        return true;
    }

    virtual void backup(size_t len) override { out.position() -= len; }

    virtual uint64_t byteCount() const override { return out.count(); }
    virtual void flush() override { out.next(); }

private:
    WriteBuffer & out;
};

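/// Returns a pair of {Avro schema node, per-row serialize function} for the given ClickHouse type.
/// type_name_increment is bumped for every type visited, so that named Avro types created here
/// (fixed, enum) get unique names within the resulting record schema.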
AvroSerializer::SchemaWithSerializeFn AvroSerializer::createSchemaWithSerializeFn(DataTypePtr data_type, size_t & type_name_increment)
{
    ++type_name_increment;

    switch (data_type->getTypeId())
    {
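        /// Avro has no unsigned or sub-32-bit integer types: 8/16-bit and unsigned 32-bit values
        /// are widened to Avro "int", 64-bit values to "long" (UInt64 is reinterpreted as signed).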
        case TypeIndex::UInt8:
            return {avro::IntSchema(), [](const IColumn & column, size_t row_num, avro::Encoder & encoder)
            {
                encoder.encodeInt(assert_cast<const ColumnUInt8 &>(column).getElement(row_num));
            }};
        case TypeIndex::Int8:
            return {avro::IntSchema(), [](const IColumn & column, size_t row_num, avro::Encoder & encoder)
            {
                encoder.encodeInt(assert_cast<const ColumnInt8 &>(column).getElement(row_num));
            }};
        case TypeIndex::UInt16:
            return {avro::IntSchema(), [](const IColumn & column, size_t row_num, avro::Encoder & encoder)
            {
                encoder.encodeInt(assert_cast<const ColumnUInt16 &>(column).getElement(row_num));
            }};
        case TypeIndex::Int16:
            return {avro::IntSchema(), [](const IColumn & column, size_t row_num, avro::Encoder & encoder)
            {
                encoder.encodeInt(assert_cast<const ColumnInt16 &>(column).getElement(row_num));
            }};
        case TypeIndex::UInt32: [[fallthrough]];
        case TypeIndex::DateTime:
            return {avro::IntSchema(), [](const IColumn & column, size_t row_num, avro::Encoder & encoder)
            {
                encoder.encodeInt(assert_cast<const ColumnUInt32 &>(column).getElement(row_num));
            }};
        case TypeIndex::Int32:
            return {avro::IntSchema(), [](const IColumn & column, size_t row_num, avro::Encoder & encoder)
            {
                encoder.encodeInt(assert_cast<const ColumnInt32 &>(column).getElement(row_num));
            }};
        case TypeIndex::UInt64:
            return {avro::LongSchema(), [](const IColumn & column, size_t row_num, avro::Encoder & encoder)
            {
                encoder.encodeLong(assert_cast<const ColumnUInt64 &>(column).getElement(row_num));
            }};
        case TypeIndex::Int64:
            return {avro::LongSchema(), [](const IColumn & column, size_t row_num, avro::Encoder & encoder)
            {
                encoder.encodeLong(assert_cast<const ColumnInt64 &>(column).getElement(row_num));
            }};
        case TypeIndex::Float32:
            return {avro::FloatSchema(), [](const IColumn & column, size_t row_num, avro::Encoder & encoder)
            {
                encoder.encodeFloat(assert_cast<const ColumnFloat32 &>(column).getElement(row_num));
            }};
        case TypeIndex::Float64:
            return {avro::DoubleSchema(), [](const IColumn & column, size_t row_num, avro::Encoder & encoder)
            {
                encoder.encodeDouble(assert_cast<const ColumnFloat64 &>(column).getElement(row_num));
            }};
        case TypeIndex::Date:
        {
            auto schema = avro::IntSchema();
            schema.root()->setLogicalType(avro::LogicalType(avro::LogicalType::DATE));
            return {schema, [](const IColumn & column, size_t row_num, avro::Encoder & encoder)
            {
                UInt16 date = assert_cast<const DataTypeDate::ColumnType &>(column).getElement(row_num);
                encoder.encodeInt(date);
            }};
        }
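        /// DateTime64 maps onto the Avro timestamp logical types, which only exist at millisecond
        /// and microsecond precision; any other scale falls through to the "not supported" error below.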
        case TypeIndex::DateTime64:
        {
            auto schema = avro::LongSchema();
            const auto & provided_type = assert_cast<const DataTypeDateTime64 &>(*data_type);

            if (provided_type.getScale() == 3)
                schema.root()->setLogicalType(avro::LogicalType(avro::LogicalType::TIMESTAMP_MILLIS));
            else if (provided_type.getScale() == 6)
                schema.root()->setLogicalType(avro::LogicalType(avro::LogicalType::TIMESTAMP_MICROS));
            else
                break;

            return {schema, [](const IColumn & column, size_t row_num, avro::Encoder & encoder)
            {
                const auto & col = assert_cast<const DataTypeDateTime64::ColumnType &>(column);
                encoder.encodeLong(col.getElement(row_num));
            }};
        }
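        /// ClickHouse strings are arbitrary byte sequences, so they are written as Avro "bytes"
        /// rather than "string", which is required to be valid UTF-8.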
        case TypeIndex::String:
            return {avro::BytesSchema(), [](const IColumn & column, size_t row_num, avro::Encoder & encoder)
            {
                const StringRef & s = assert_cast<const ColumnString &>(column).getDataAt(row_num);
                encoder.encodeBytes(reinterpret_cast<const uint8_t *>(s.data), s.size);
            }};
        case TypeIndex::FixedString:
        {
            auto size = data_type->getSizeOfValueInMemory();
            auto schema = avro::FixedSchema(size, "fixed_" + toString(type_name_increment));
            return {schema, [](const IColumn & column, size_t row_num, avro::Encoder & encoder)
            {
                const StringRef & s = assert_cast<const ColumnFixedString &>(column).getDataAt(row_num);
                encoder.encodeFixed(reinterpret_cast<const uint8_t *>(s.data), s.size);
            }};
        }
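        /// Avro encodes an enum value as the index of its symbol in the schema, so the ClickHouse
        /// enum value is translated to a symbol index through a precomputed mapping.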
        case TypeIndex::Enum8:
        {
            auto schema = avro::EnumSchema("enum8_" + toString(type_name_increment)); /// type names must be different for different types.
            std::unordered_map<DataTypeEnum8::FieldType, size_t> enum_mapping;
            const auto & enum_values = assert_cast<const DataTypeEnum8 &>(*data_type).getValues();
            for (size_t i = 0; i < enum_values.size(); ++i)
            {
                schema.addSymbol(enum_values[i].first);
                enum_mapping.emplace(enum_values[i].second, i);
            }
            return {schema, [enum_mapping](const IColumn & column, size_t row_num, avro::Encoder & encoder)
            {
                auto enum_value = assert_cast<const DataTypeEnum8::ColumnType &>(column).getElement(row_num);
                encoder.encodeEnum(enum_mapping.at(enum_value));
            }};
        }
        case TypeIndex::Enum16:
        {
            auto schema = avro::EnumSchema("enum16_" + toString(type_name_increment));
            std::unordered_map<DataTypeEnum16::FieldType, size_t> enum_mapping;
            const auto & enum_values = assert_cast<const DataTypeEnum16 &>(*data_type).getValues();
            for (size_t i = 0; i < enum_values.size(); ++i)
            {
                schema.addSymbol(enum_values[i].first);
                enum_mapping.emplace(enum_values[i].second, i);
            }
            return {schema, [enum_mapping](const IColumn & column, size_t row_num, avro::Encoder & encoder)
            {
                auto enum_value = assert_cast<const DataTypeEnum16::ColumnType &>(column).getElement(row_num);
                encoder.encodeEnum(enum_mapping.at(enum_value));
            }};
        }
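        /// An array row is the slice [offsets[row_num - 1], offsets[row_num]) of the nested column;
        /// it is written as a single Avro array block: the item count followed by the items.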
        case TypeIndex::Array:
        {
            const auto & array_type = assert_cast<const DataTypeArray &>(*data_type);
            auto nested_mapping = createSchemaWithSerializeFn(array_type.getNestedType(), type_name_increment);
            auto schema = avro::ArraySchema(nested_mapping.schema);
            return {schema, [nested_mapping](const IColumn & column, size_t row_num, avro::Encoder & encoder)
            {
                const ColumnArray & column_array = assert_cast<const ColumnArray &>(column);
                const ColumnArray::Offsets & offsets = column_array.getOffsets();
                size_t offset = offsets[row_num - 1];
                size_t next_offset = offsets[row_num];
                size_t row_count = next_offset - offset;
                const IColumn & nested_column = column_array.getData();

                encoder.arrayStart();
                if (row_count > 0)
                {
                    encoder.setItemCount(row_count);
                }
                for (size_t i = offset; i < next_offset; ++i)
                {
                    nested_mapping.serialize(nested_column, i, encoder);
                }
                encoder.arrayEnd();
            }};
        }
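        /// Nullable(T) becomes the Avro union ["null", T]: branch 0 carries null, branch 1 the value.
        /// Nullable(Nothing) degenerates to the plain "null" schema produced for the nested type.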
        case TypeIndex::Nullable:
        {
            auto nested_type = removeNullable(data_type);
            auto nested_mapping = createSchemaWithSerializeFn(nested_type, type_name_increment);
            if (nested_type->getTypeId() == TypeIndex::Nothing)
            {
                return nested_mapping;
            }
            else
            {
                avro::UnionSchema union_schema;
                union_schema.addType(avro::NullSchema());
                union_schema.addType(nested_mapping.schema);
                return {union_schema, [nested_mapping](const IColumn & column, size_t row_num, avro::Encoder & encoder)
                {
                    const ColumnNullable & col = assert_cast<const ColumnNullable &>(column);
                    if (!col.isNullAt(row_num))
                    {
                        encoder.encodeUnionIndex(1);
                        nested_mapping.serialize(col.getNestedColumn(), row_num, encoder);
                    }
                    else
                    {
                        encoder.encodeUnionIndex(0);
                        encoder.encodeNull();
                    }
                }};
            }
        }
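        /// LowCardinality is a storage-level optimization and is transparent in the output:
        /// the nested type's schema is used, and each row is resolved through the dictionary
        /// before being serialized.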
        case TypeIndex::LowCardinality:
        {
            const auto & nested_type = removeLowCardinality(data_type);
            auto nested_mapping = createSchemaWithSerializeFn(nested_type, type_name_increment);
            return {nested_mapping.schema, [nested_mapping](const IColumn & column, size_t row_num, avro::Encoder & encoder)
            {
                const auto & col = assert_cast<const ColumnLowCardinality &>(column);
                nested_mapping.serialize(*col.getDictionary().getNestedColumn(), col.getIndexAt(row_num), encoder);
            }};
        }
        case TypeIndex::Nothing:
            return {avro::NullSchema(), [](const IColumn &, size_t, avro::Encoder & encoder) { encoder.encodeNull(); }};
        default:
            break;
    }
    throw Exception("Type " + data_type->getName() + " is not supported for Avro output", ErrorCodes::ILLEGAL_COLUMN);
}

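/// Builds a record schema named "row" with one field per output column and collects
/// the per-column serialize functions in matching order.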
AvroSerializer::AvroSerializer(const ColumnsWithTypeAndName & columns)
{
    avro::RecordSchema record_schema("row");

    size_t type_name_increment = 0;
    for (const auto & column : columns)
    {
        try
        {
            auto field_mapping = createSchemaWithSerializeFn(column.type, type_name_increment);
            serialize_fns.push_back(field_mapping.serialize);
            //TODO: verify name starts with A-Za-z_
            record_schema.addField(column.name, field_mapping.schema);
        }
        catch (Exception & e)
        {
            e.addMessage("column " + column.name);
            throw;
        }
    }
    schema.setSchema(record_schema);
}

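/// Encodes one row by invoking the per-column serialize functions in schema field order.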
void AvroSerializer::serializeRow(const Columns & columns, size_t row_num, avro::Encoder & encoder)
{
    size_t num_columns = columns.size();
    for (size_t i = 0; i < num_columns; ++i)
    {
        serialize_fns[i](*columns[i], row_num, encoder);
    }
}

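/// Resolves the codec name from the format settings; an empty name selects snappy when the
/// Avro library was built with it, otherwise deflate.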
static avro::Codec getCodec(const std::string & codec_name)
{
    if (codec_name.empty())
    {
#ifdef SNAPPY_CODEC_AVAILABLE
        return avro::Codec::SNAPPY_CODEC;
#else
        return avro::Codec::DEFLATE_CODEC;
#endif
    }

    if (codec_name == "null") return avro::Codec::NULL_CODEC;
    if (codec_name == "deflate") return avro::Codec::DEFLATE_CODEC;
#ifdef SNAPPY_CODEC_AVAILABLE
    if (codec_name == "snappy") return avro::Codec::SNAPPY_CODEC;
#endif

    throw Exception("Avro codec " + codec_name + " is not available", ErrorCodes::BAD_ARGUMENTS);
}

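/// The Avro data file writer owns the WriteBuffer adapter and produces the Avro Object Container
/// File layout (a header carrying the schema, then data blocks separated by sync markers),
/// using the sync interval and codec requested in the format settings.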
AvroRowOutputFormat::AvroRowOutputFormat(
    WriteBuffer & out_, const Block & header_, FormatFactory::WriteCallback callback, const FormatSettings & settings_)
    : IRowOutputFormat(header_, out_, callback)
    , settings(settings_)
    , serializer(header_.getColumnsWithTypeAndName())
    , file_writer(
        std::make_unique<OutputStreamWriteBufferAdapter>(out_),
        serializer.getSchema(),
        settings.avro.output_sync_interval,
        getCodec(settings.avro.output_codec))
{
}

AvroRowOutputFormat::~AvroRowOutputFormat() = default;

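/// The file writer buffers encoded rows and flushes them as an Avro block with a sync marker
/// once the configured sync interval is exceeded; syncIfNeeded() performs that check.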
void AvroRowOutputFormat::writePrefix()
{
    file_writer.syncIfNeeded();
}

void AvroRowOutputFormat::write(const Columns & columns, size_t row_num)
{
    file_writer.syncIfNeeded();
    serializer.serializeRow(columns, row_num, file_writer.encoder());
    file_writer.incr();
}

void AvroRowOutputFormat::writeSuffix()
{
    file_writer.close();
}

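/// Registers the "Avro" output format with the FormatFactory.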
void registerOutputFormatProcessorAvro(FormatFactory & factory)
{
    factory.registerOutputFormatProcessor("Avro", [](
        WriteBuffer & buf,
        const Block & sample,
        FormatFactory::WriteCallback callback,
        const FormatSettings & settings)
    {
        return std::make_shared<AvroRowOutputFormat>(buf, sample, callback, settings);
    });
}

}

#else

namespace DB
{
class FormatFactory;
void registerOutputFormatProcessorAvro(FormatFactory &)
{
}
}

#endif