linear regression tests
This commit is contained in:
parent f550304fa4 · commit 35a266c96a
@@ -19,6 +19,9 @@ AggregateFunctionPtr createAggregateFunctionMLMethod(
+    if (parameters.size() > 4)
+        throw Exception("Aggregate function " + name + " requires at most four parameters", ErrorCodes::NUMBER_OF_ARGUMENTS_DOESNT_MATCH);
+
     if (argument_types.size() < 2)
         throw Exception("Aggregate function " + name + " requires at least two arguments", ErrorCodes::NUMBER_OF_ARGUMENTS_DOESNT_MATCH);

     for (size_t i = 0; i < argument_types.size(); ++i)
     {
         if (!WhichDataType(argument_types[i]).isFloat64())
@@ -28,58 +31,58 @@ AggregateFunctionPtr createAggregateFunctionMLMethod(
     }

     Float64 learning_rate = Float64(0.01);
+    Float64 l2_reg_coef = Float64(0.01);
     UInt32 batch_size = 1;

+    std::shared_ptr<IGradientComputer> gc;
     std::shared_ptr<IWeightsUpdater> wu;
-    std::shared_ptr<IGradientComputer> gc;

     if (!parameters.empty())
     {
         learning_rate = applyVisitor(FieldVisitorConvertToNumber<Float64>(), parameters[0]);
     }
     if (parameters.size() > 1)
     {
-        batch_size = applyVisitor(FieldVisitorConvertToNumber<UInt32>(), parameters[1]);
+        l2_reg_coef = applyVisitor(FieldVisitorConvertToNumber<Float64>(), parameters[1]);
     }
-
-    if (std::is_same<Method, FuncLinearRegression>::value)
-    {
-        gc = std::make_shared<LinearRegression>();
-    } else if (std::is_same<Method, FuncLogisticRegression>::value)
-    {
-        gc = std::make_shared<LogisticRegression>();
-    } else
-    {
-        throw Exception("Such gradient computer is not implemented yet", ErrorCodes::ILLEGAL_TYPE_OF_ARGUMENT);
-    }
     if (parameters.size() > 2)
     {
-        if (applyVisitor(FieldVisitorConvertToNumber<UInt32>(), parameters[2]) == Float64{1.0})
+        batch_size = applyVisitor(FieldVisitorConvertToNumber<UInt32>(), parameters[2]);
+    }
+    if (parameters.size() > 3)
+    {
+        if (applyVisitor(FieldVisitorConvertToNumber<UInt32>(), parameters[3]) == Float64{1.0})
         {
             wu = std::make_shared<StochasticGradientDescent>();
-        } else if (applyVisitor(FieldVisitorConvertToNumber<UInt32>(), parameters[2]) == Float64{2.0})
+        } else if (applyVisitor(FieldVisitorConvertToNumber<UInt32>(), parameters[3]) == Float64{2.0})
         {
             wu = std::make_shared<Momentum>();
-        } else if (applyVisitor(FieldVisitorConvertToNumber<UInt32>(), parameters[2]) == Float64{3.0})
+        } else if (applyVisitor(FieldVisitorConvertToNumber<UInt32>(), parameters[3]) == Float64{3.0})
         {
             wu = std::make_shared<Nesterov>();
-        } else if (applyVisitor(FieldVisitorConvertToNumber<UInt32>(), parameters[2]) == Float64{4.0})
-        {
-            /// Adam should be here
-            wu = std::make_shared<Nesterov>();
         } else {
-            throw Exception("Such weights updater is not implemented yet", ErrorCodes::ILLEGAL_TYPE_OF_ARGUMENT);
+            throw Exception("Invalid parameter for weights updater", ErrorCodes::ILLEGAL_TYPE_OF_ARGUMENT);
         }
     } else
    {
         wu = std::make_unique<StochasticGradientDescent>();
     }

-    if (argument_types.size() < 2)
-        throw Exception("Aggregate function " + name + " requires at least two arguments", ErrorCodes::NUMBER_OF_ARGUMENTS_DOESNT_MATCH);
+    if (std::is_same<Method, FuncLinearRegression>::value)
+    {
+        gc = std::make_shared<LinearRegression>();
+    } else if (std::is_same<Method, FuncLogisticRegression>::value)
+    {
+        gc = std::make_shared<LogisticRegression>();
+    } else
+    {
+        throw Exception("Such gradient computer is not implemented yet", ErrorCodes::ILLEGAL_TYPE_OF_ARGUMENT);
+    }

-    return std::make_shared<Method>(argument_types.size() - 1, gc, wu, learning_rate, batch_size, argument_types, parameters);
+    return std::make_shared<Method>(argument_types.size() - 1, gc, wu, learning_rate, l2_reg_coef, batch_size, argument_types, parameters);
 }
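Net effect: the factory now reads up to four positional parameters in a fixed order — learning rate, L2 regularization coefficient, batch size, weights-updater id. A minimal standalone sketch of that layout (a hypothetical illustration only, not ClickHouse code; the struct and function names are invented, the defaults and updater ids are taken from the branches above):

#include <cstdint>
#include <iostream>
#include <vector>

struct MLMethodParams
{
    double learning_rate = 0.01;  // parameters[0]
    double l2_reg_coef = 0.01;    // parameters[1]
    uint32_t batch_size = 1;      // parameters[2]
    uint32_t updater_id = 1;      // parameters[3]: 1 = SGD, 2 = Momentum, 3 = Nesterov
};

MLMethodParams parseMLMethodParams(const std::vector<double> & parameters)
{
    MLMethodParams p;
    if (parameters.size() > 0) p.learning_rate = parameters[0];
    if (parameters.size() > 1) p.l2_reg_coef = parameters[1];
    if (parameters.size() > 2) p.batch_size = static_cast<uint32_t>(parameters[2]);
    if (parameters.size() > 3) p.updater_id = static_cast<uint32_t>(parameters[3]);
    return p;
}

int main()
{
    // Mirrors LogisticRegressionState(0.1, 0.0, 1.0, 1) from the test later in this commit.
    auto p = parseMLMethodParams({0.1, 0.0, 1.0, 1.0});
    std::cout << p.learning_rate << ' ' << p.l2_reg_coef << ' '
              << p.batch_size << ' ' << p.updater_id << '\n';
}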
@@ -28,8 +28,7 @@ namespace ErrorCodes
 }

 /**
-IGradientComputer class computes gradient according to its loss function
-and stores mini-batch
+GradientComputer class computes gradient according to its loss function
 */
 class IGradientComputer
 {
@@ -39,16 +38,12 @@ public:

     virtual ~IGradientComputer() = default;

-    /// Adds to batch_gradient computed gradient in point (weigts, bias) using corresponding loss function
+    /// Adds computed gradient in new point (weights, bias) to batch_gradient
     virtual void compute(std::vector<Float64> * batch_gradient, const std::vector<Float64> &weights, Float64 bias,
-                         Float64 learning_rate, Float64 target, const IColumn **columns, size_t row_num) = 0;
+                         Float64 learning_rate, Float64 l2_reg_coef, Float64 target, const IColumn **columns, size_t row_num) = 0;

-    virtual Float64 predict(const std::vector<Float64> &predict_feature,
-                            const std::vector<Float64> &weights,
-                            Float64 bias) const = 0;
-
-    /// Now we should use predict_for_all function instead of predict
-    virtual void predict_for_all(ColumnVector<Float64>::Container &container,
+    /// Now we should use predict_block function instead of predict
+    virtual void predict_block(ColumnVector<Float64>::Container &container,
                                  Block &block, const ColumnNumbers &arguments,
                                  const std::vector<Float64> &weights,
                                  Float64 bias) const = 0;
@@ -62,7 +57,7 @@ public:
     {}

     void compute(std::vector<Float64> * batch_gradient, const std::vector<Float64> &weights, Float64 bias,
-                 Float64 learning_rate, Float64 target, const IColumn **columns, size_t row_num) override
+                 Float64 learning_rate, Float64 l2_reg_coef, Float64 target, const IColumn **columns, size_t row_num) override
     {
         Float64 derivative = (target - bias);
         for (size_t i = 0; i < weights.size(); ++i)
@@ -75,27 +70,12 @@ public:
         for (size_t i = 0; i < weights.size(); ++i)
         {
             (*batch_gradient)[i] +=
-                derivative * static_cast<const ColumnVector<Float64> &>(*columns[i]).getData()[row_num];
+                derivative * static_cast<const ColumnVector<Float64> &>(*columns[i]).getData()[row_num]
+                - 2 * l2_reg_coef * weights[i];
         }
     }

-    Float64 predict(const std::vector<Float64> &predict_feature,
-                    const std::vector<Float64> &weights, Float64 bias) const override
-    {
-        /// We do not update the weights during predict, since that could slow prediction down;
-        /// they could, however, be updated on every merge, for example, regardless of how many elements are in the batch.
-
-        Float64 res{0.0};
-        for (size_t i = 0; i < predict_feature.size(); ++i)
-        {
-            res += predict_feature[i] * weights[i];
-        }
-        res += bias;
-
-        return res;
-    }
-
-    void predict_for_all(ColumnVector<Float64>::Container &container,
+    void predict_block(ColumnVector<Float64>::Container &container,
                         Block &block,
                         const ColumnNumbers &arguments,
                         const std::vector<Float64> &weights, Float64 bias) const override
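The new - 2 * l2_reg_coef * weights[i] term is the ridge penalty. A sketch of the arithmetic, assuming the per-row loss is squared error plus an L2 term with \lambda = l2_reg_coef:

    \ell(w, b) = \bigl(y - (w \cdot x + b)\bigr)^2 + \lambda \lVert w \rVert^2,
    \qquad
    -\frac{\partial \ell}{\partial w_i} = 2\,(y - w \cdot x - b)\, x_i - 2 \lambda w_i,

which matches the accumulated direction derivative * x_i - 2 * l2_reg_coef * weights[i] (the factor 2 and the learning rate are folded into derivative in the part of compute not shown in this hunk).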
@@ -133,7 +113,7 @@ public:
     {}

     void compute(std::vector<Float64> * batch_gradient, const std::vector<Float64> &weights, Float64 bias,
-                 Float64 learning_rate, Float64 target, const IColumn **columns, size_t row_num) override
+                 Float64 learning_rate, Float64 l2_reg_coef, Float64 target, const IColumn **columns, size_t row_num) override
     {
         Float64 derivative = bias;
         for (size_t i = 0; i < weights.size(); ++i)
@@ -148,28 +128,12 @@ public:
         {
             (*batch_gradient)[i] +=
                 learning_rate * target *
-                static_cast<const ColumnVector<Float64> &>(*columns[i]).getData()[row_num]
-                / (derivative + 1);
+                static_cast<const ColumnVector<Float64> &>(*columns[i]).getData()[row_num] / (derivative + 1)
+                - 2 * l2_reg_coef * weights[i];
         }
     }

-    Float64 predict(const std::vector<Float64> &predict_feature,
-                    const std::vector<Float64> &weights, Float64 bias) const override
-    {
-        /// We do not update the weights during predict, since that could slow prediction down;
-        /// they could, however, be updated on every merge, for example, regardless of how many elements are in the batch.
-
-        Float64 res{0.0};
-        for (size_t i = 0; i < predict_feature.size(); ++i)
-        {
-            res += predict_feature[i] * weights[i];
-        }
-        res += bias;
-        res = 1 / (1 + exp(-res));
-        return res;
-    }
-
-    void predict_for_all(ColumnVector<Float64>::Container & container,
+    void predict_block(ColumnVector<Float64>::Container & container,
                         Block & block,
                         const ColumnNumbers & arguments,
                         const std::vector<Float64> & weights, Float64 bias) const override
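Same pattern for the logistic loss. Assuming targets y in {-1, +1} (consistent with the target * ... / (derivative + 1) form above, where derivative = e^{y (w \cdot x + b)}):

    \ell(w, b) = \log\bigl(1 + e^{-y (w \cdot x + b)}\bigr),
    \qquad
    -\frac{\partial \ell}{\partial w_i} = \frac{y \, x_i}{1 + e^{\,y (w \cdot x + b)}},

so each row contributes learning_rate * y * x_i / (derivative + 1), and the - 2 * l2_reg_coef * weights[i] term again comes from an added L2 penalty.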
@@ -199,8 +163,8 @@ public:


 /**
- * IWeightsUpdater class defines the way to update current state
- * and uses GradientComputer on each iteration
+ * IWeightsUpdater class defines the way to update current weights
+ * and uses class GradientComputer on each iteration
 */
 class IWeightsUpdater
 {
@@ -209,9 +173,9 @@ public:

     virtual void add_to_batch(std::vector<Float64> * batch_gradient, std::shared_ptr<IGradientComputer> gc,
                               const std::vector<Float64> & weights, Float64 bias,
-                              Float64 learning_rate, Float64 target, const IColumn **columns, size_t row_num)
+                              Float64 learning_rate, Float64 l2_reg_coef, Float64 target, const IColumn **columns, size_t row_num)
     {
-        gc->compute(batch_gradient, weights, bias, learning_rate, target, columns, row_num);
+        gc->compute(batch_gradient, weights, bias, learning_rate, l2_reg_coef, target, columns, row_num);
     }

     virtual void update(UInt32 batch_size,
@@ -313,7 +277,7 @@ public:

     void add_to_batch(std::vector<Float64> * batch_gradient, std::shared_ptr<IGradientComputer> gc,
                       const std::vector<Float64> & weights, Float64 bias,
-                      Float64 learning_rate, Float64 target, const IColumn ** columns, size_t row_num) override
+                      Float64 learning_rate, Float64 l2_reg_coef, Float64 target, const IColumn ** columns, size_t row_num) override
     {
         if (accumulated_gradient.empty())
         {
@@ -327,7 +291,7 @@ public:
         }
         auto shifted_bias = bias + accumulated_gradient[weights.size()] * alpha_;

-        gc->compute(batch_gradient, shifted_weights, shifted_bias, learning_rate, target, columns, row_num);
+        gc->compute(batch_gradient, shifted_weights, shifted_bias, learning_rate, l2_reg_coef, target, columns, row_num);
     }

     void update(UInt32 batch_size,
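The shifted_weights / shifted_bias pair in this add_to_batch is the usual Nesterov look-ahead: the gradient is evaluated at the point the momentum term is about to carry the weights to. In the textbook formulation (alpha the momentum coefficient, eta the learning rate, v the accumulated step, with a plus sign because this code ascends the negative loss):

    v_{t+1} = \alpha v_t + \eta \nabla f(w_t + \alpha v_t),
    \qquad
    w_{t+1} = w_t + v_{t+1},

with accumulated_gradient playing the role of v and alpha_ the role of alpha.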
@@ -376,75 +340,8 @@ private:
 };


-// TODO: check after the momentum logic change
-/*
-class Adam : public IWeightsUpdater
-{
-public:
-    Adam()
-    {}
-
-    Adam(Float64 betta1, Float64 betta2) : betta1_(betta1), betta2_(betta2), betta1t_(betta1), betta2t_(betta2)
-    {}
-
-    void update(UInt32 cur_batch,
-                std::vector<Float64> & weights, Float64 & bias,
-                std::vector<Float64> * batch_gradient) override
-    {
-        if (mt_.size() == 0)
-        {
-            mt_.resize(batch_gradient.size(), Float64{0.0});
-            vt_.resize(batch_gradient.size(), Float64{0.0});
-        }
-        Float64 eps = 0.01;
-        for (size_t i = 0; i < batch_gradient.size(); ++i)
-        {
-            mt_[i] = mt_[i] * betta1_ + (1 - betta1_) * batch_gradient[i];
-            vt_[i] = vt_[i] * betta2_ + (1 - betta2_) * batch_gradient[i] * batch_gradient[i];
-            if (t < 8)
-            {
-                mt_[i] = mt_[i] / (1 - betta1t_);
-                betta1t_ *= betta1_;
-            }
-            if (t < 850)
-            {
-                vt_[i] = vt_[i] / (1 - betta2t_);
-                betta2t_ *= betta2_;
-            }
-        }
-        for (size_t i = 0; i < weights.size(); ++i)
-        {
-            weights[i] += (mt_[i] / (sqrt(vt_[i] + eps))) / cur_batch;
-        }
-        bias += (mt_[weights.size()] / (sqrt(vt_[weights.size()] + eps))) / cur_batch;
-        t += 1;
-    }
-
-    virtual void merge(const IWeightsUpdater &rhs, Float64 frac, Float64 rhs_frac) override
-    {
-        auto &adam_rhs = static_cast<const Adam &>(rhs);
-        for (size_t i = 0; i < mt_.size(); ++i)
-        {
-            mt_[i] = mt_[i] * frac + adam_rhs.mt_[i] * rhs_frac;
-            vt_[i] = vt_[i] * frac + adam_rhs.vt_[i] * rhs_frac;
-        }
-    }
-
-private:
-    Float64 betta1_{0.2};
-    Float64 betta2_{0.3};
-    Float64 betta1t_{0.3};
-    Float64 betta2t_{0.3};
-    UInt32 t = 0;
-    std::vector<Float64> mt_;
-    std::vector<Float64> vt_;
-};
-*/
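For reference, the textbook Adam update that the removed draft approximates (stated here as background, not as a claim about any future ClickHouse implementation): with batch gradient g_t, decay rates \beta_1, \beta_2, step size \eta and small \epsilon,

    m_t = \beta_1 m_{t-1} + (1 - \beta_1)\, g_t,
    \qquad
    v_t = \beta_2 v_{t-1} + (1 - \beta_2)\, g_t^2,

    \hat{m}_t = \frac{m_t}{1 - \beta_1^t},
    \qquad
    \hat{v}_t = \frac{v_t}{1 - \beta_2^t},
    \qquad
    w_t = w_{t-1} + \eta \, \frac{\hat{m}_t}{\sqrt{\hat{v}_t} + \epsilon}
    \quad\text{(plus sign: this code ascends the negative loss).}

The draft deviates from this in a few places (bias correction applied only while t < 8 and t < 850, \epsilon inside the square root), which is presumably part of why it stayed commented out and is removed here.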
 /**
 * LinearModelData is a class which manages current state of learning
 * and is stored as AggregateFunctionState
 */
 class LinearModelData
 {
@@ -453,11 +350,13 @@ public:
     {}

     LinearModelData(Float64 learning_rate,
+                    Float64 l2_reg_coef,
                     UInt32 param_num,
                     UInt32 batch_capacity,
                     std::shared_ptr<IGradientComputer> gc,
                     std::shared_ptr<IWeightsUpdater> wu)
     : learning_rate(learning_rate),
+      l2_reg_coef(l2_reg_coef),
       batch_capacity(batch_capacity),
       batch_size(0),
       gradient_computer(std::move(gc)),
@@ -474,7 +373,7 @@ public:

         /// Here we have columns + 1 as first column corresponds to target value, and others - to features
         weights_updater->add_to_batch(&gradient_batch, gradient_computer,
-                                      weights, bias, learning_rate, target, columns + 1, row_num);
+                                      weights, bias, learning_rate, l2_reg_coef, target, columns + 1, row_num);

         ++batch_size;
         if (batch_size == batch_capacity)
@@ -489,20 +388,18 @@ public:
             return;

         update_state();
-        /// cannot update rhs because of constness
-        // rhs.update_weights();
+        /// can't update rhs state because it's constant

-        Float64 frac = static_cast<Float64>(iter_num) / (iter_num + rhs.iter_num);
-        Float64 rhs_frac = static_cast<Float64>(rhs.iter_num) / (iter_num + rhs.iter_num);
+        Float64 frac = (static_cast<Float64>(iter_num) * iter_num) / (iter_num * iter_num + rhs.iter_num * rhs.iter_num);

         for (size_t i = 0; i < weights.size(); ++i)
         {
-            weights[i] = weights[i] * frac + rhs.weights[i] * rhs_frac;
+            weights[i] = weights[i] * frac + rhs.weights[i] * (1 - frac);
         }
-        bias = bias * frac + rhs.bias * rhs_frac;
+        bias = bias * frac + rhs.bias * (1 - frac);

         iter_num += rhs.iter_num;
-        weights_updater->merge(*rhs.weights_updater, frac, rhs_frac);
+        weights_updater->merge(*rhs.weights_updater, frac, 1 - frac);
     }

     void write(WriteBuffer &buf) const
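The merge now blends two partial aggregation states with a single fraction instead of a (frac, rhs_frac) pair. Writing n_1 = iter_num and n_2 = rhs.iter_num, the weight moves from the linear rule n_1 / (n_1 + n_2) to

    \mathrm{frac} = \frac{n_1^2}{n_1^2 + n_2^2},
    \qquad
    w \leftarrow \mathrm{frac} \cdot w + (1 - \mathrm{frac}) \cdot w_{\mathrm{rhs}},

which pulls the merged model more strongly toward whichever state has performed more update iterations than the linear rule did.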
@@ -525,21 +422,9 @@ public:
         weights_updater->read(buf);
     }

-    Float64 predict(const std::vector<Float64> &predict_feature) const
+    void predict_block(ColumnVector<Float64>::Container &container, Block &block, const ColumnNumbers &arguments) const
     {
-        /// We do not update the weights during predict, since that could slow prediction down;
-        /// they could, however, be updated on every merge, for example, regardless of how many elements are in the batch.
-        // if (cur_batch)
-        // {
-        //     update_weights();
-        // }
-
-        return gradient_computer->predict(predict_feature, weights, bias);
-    }
-
-    void predict_for_all(ColumnVector<Float64>::Container &container, Block &block, const ColumnNumbers &arguments) const
-    {
-        gradient_computer->predict_for_all(container, block, arguments, weights, bias);
+        gradient_computer->predict_block(container, block, arguments, weights, bias);
     }

 private:
@@ -547,6 +432,7 @@ private:
     Float64 bias{0.0};

     Float64 learning_rate;
+    Float64 l2_reg_coef;
     UInt32 batch_capacity;

     UInt32 iter_num = 0;
@@ -557,8 +443,8 @@ private:
     std::shared_ptr<IWeightsUpdater> weights_updater;

     /**
-     * The function is called when we want to flush current batch and make a step with it
+     * The function is called when we want to flush current batch and update our weights
     */
     void update_state()
     {
         if (batch_size == 0)
@@ -587,12 +473,14 @@ public:
         std::shared_ptr<IGradientComputer> gradient_computer,
         std::shared_ptr<IWeightsUpdater> weights_updater,
         Float64 learning_rate,
+        Float64 l2_reg_coef,
         UInt32 batch_size,
         const DataTypes & argument_types,
         const Array & params)
     : IAggregateFunctionDataHelper<Data, AggregateFunctionMLMethod<Data, Name>>(argument_types, params),
       param_num(param_num),
       learning_rate(learning_rate),
+      l2_reg_coef(l2_reg_coef),
       batch_size(batch_size),
       gc(std::move(gradient_computer)),
       wu(std::move(weights_updater)) {
@@ -605,7 +493,7 @@ public:

     void create(AggregateDataPtr place) const override
     {
-        new (place) Data(learning_rate, param_num, batch_size, gc, wu);
+        new (place) Data(learning_rate, l2_reg_coef, param_num, batch_size, gc, wu);
     }

     void add(AggregateDataPtr place, const IColumn ** columns, size_t row_num, Arena *) const override
@@ -613,10 +501,8 @@ public:
         this->data(place).add(columns, row_num);
     }

-    /// would like a non-const rhs here
     void merge(AggregateDataPtr place, ConstAggregateDataPtr rhs, Arena *) const override
     {
-        std::cout << "\nMERGING!!\n\n";
         this->data(place).merge(this->data(rhs));
     }

@@ -632,8 +518,6 @@ public:

     void predictResultInto(ConstAggregateDataPtr place, IColumn & to, Block & block, const ColumnNumbers & arguments) const
     {
-        std::cout << "\nPREDICTING!!\n\n";
-
         if (arguments.size() != param_num + 1)
             throw Exception("Predict got incorrect number of arguments. Got: " +
                 std::to_string(arguments.size()) + ". Required: " + std::to_string(param_num + 1),
@@ -641,20 +525,7 @@ public:

         auto &column = dynamic_cast<ColumnVector<Float64> &>(to);

-        /// This is how it was done with a single predict; leaving it here for now
-        // std::vector<Float64> predict_features(arguments.size() - 1);
-        // for (size_t i = 1; i < arguments.size(); ++i)
-        // {
-        //     const auto& element = (*block.getByPosition(arguments[i]).column)[row_num];
-        //     if (element.getType() != Field::Types::Float64)
-        //         throw Exception("Prediction arguments must be values of type Float",
-        //                         ErrorCodes::BAD_ARGUMENTS);
-        //
-        //     // predict_features[i - 1] = element.get<Float64>();
-        // }
-        // column.getData().push_back(this->data(place).predict(predict_features));
-        // column.getData().push_back(this->data(place).predict_for_all());
-        this->data(place).predict_for_all(column.getData(), block, arguments);
+        this->data(place).predict_block(column.getData(), block, arguments);
     }

     void insertResultInto(ConstAggregateDataPtr place, IColumn & to) const override
@@ -669,6 +540,7 @@ public:
 private:
     UInt32 param_num;
     Float64 learning_rate;
+    Float64 l2_reg_coef;
     UInt32 batch_size;
     std::shared_ptr<IGradientComputer> gc;
     std::shared_ptr<IWeightsUpdater> wu;
@@ -1,300 +1,2 @@
-[300 identical lines removed, each: -67.00423606399804]
+1
+1
File diff suppressed because one or more lines are too long
@@ -1,92 +1,4 @@
-[92 lines removed, alternating: 0.014756373152892969 / 0.9981703893717232]
+0
+1
+1
+0
@@ -10,7 +10,10 @@ CREATE TABLE IF NOT EXISTS test.defaults
 ) ENGINE = Memory;
 insert into test.defaults values (1,2,1,-1,-2),(-1,-2,-1,1,2),(1,2,1,-1,-2),(-1,-2,-1,1,2),(1,2,1,-1,-2),(-1,-2,-1,1,2),(1,2,1,-1,-2),(-1,-2,-1,1,2),(1,2,1,-1,-2),(-1,-2,-1,1,2),(1,2,1,-1,-2),(-1,-2,-1,1,2),(1,2,1,-1,-2),(-1,-2,-1,1,2),(1,2,1,-1,-2),(-1,-2,-1,1,2),(1,2,1,-1,-2),(-1,-2,-1,1,2),(1,2,1,-1,-2),(-1,-2,-1,1,2),(1,2,1,-1,-2),(-1,-2,-1,1,2),(1,2,1,-1,-2),(-1,-2,-1,1,2),(1,2,1,-1,-2),(-1,-2,-1,1,2),(1,2,1,-1,-2),(-1,-2,-1,1,2),(1,2,1,-1,-2),(-1,-2,-1,1,2),(1,2,1,-1,-2),(-1,-2,-1,1,2),(1,2,1,-1,-2),(-1,-2,-1,1,2),(1,2,1,-1,-2),(-1,-2,-1,1,2),(1,2,1,-1,-2),(-1,-2,-1,1,2),(1,2,1,-1,-2),(-1,-2,-1,1,2),(1,2,1,-1,-2),(-1,-2,-1,1,2),(1,2,1,-1,-2),(-1,-2,-1,1,2),(1,2,1,-1,-2),(-1,-2,-1,1,2),(1,2,1,-1,-2),(-1,-2,-1,1,2),(1,2,1,-1,-2),(-1,-2,-1,1,2),(1,2,1,-1,-2),(-1,-2,-1,1,2),(1,2,1,-1,-2),(-1,-2,-1,1,2),(1,2,1,-1,-2),(-1,-2,-1,1,2),(1,2,1,-1,-2),(-1,-2,-1,1,2),(1,2,1,-1,-2),(-1,-2,-1,1,2),(1,2,1,-1,-2),(-1,-2,-1,1,2),(1,2,1,-1,-2),(-1,-2,-1,1,2),(1,2,1,-1,-2),(-1,-2,-1,1,2),(1,2,1,-1,-2),(-1,-2,-1,1,2),(1,2,1,-1,-2),(-1,-2,-1,1,2),(1,2,1,-1,-2),(-1,-2,-1,1,2),(1,2,1,-1,-2),(-1,-2,-1,1,2),(1,2,1,-1,-2),(-1,-2,-1,1,2),(1,2,1,-1,-2),(-1,-2,-1,1,2),(1,2,1,-1,-2),(-1,-2,-1,1,2),(1,2,1,-1,-2),(-1,-2,-1,1,2),(1,2,1,-1,-2),(-1,-2,-1,1,2),(1,2,1,-1,-2),(-1,-2,-1,1,2),(1,2,1,-1,-2),(-1,-2,-1,1,2),(1,2,1,-1,-2),(-1,-2,-1,1,2),(1,2,1,-1,-2),(-1,-2,-1,1,2)
+DROP TABLE IF EXISTS test.model;
-create table test.model engine = Memory as select LogisticRegressionState(0.1, 5, 1.0)(target, param1, param2) as state from test.defaults;
+create table test.model engine = Memory as select LogisticRegressionState(0.1, 0.0, 1.0, 1)(target, param1, param2) as state from test.defaults;

-with (select state from test.model) as model select evalMLMethod(model, predict1, predict2) from test.defaults;
+select ans < 1.1 and ans > 0.9 from
+(with (select state from test.model) as model select evalMLMethod(model, predict1, predict2) as ans from test.defaults limit 2);
+
+select ans > -0.1 and ans < 0.1 from
+(with (select state from test.model) as model select evalMLMethod(model, predict1, predict2) as ans from test.defaults limit 2);
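The two assertions bracket the sigmoid outputs for the two classes. evalMLMethod on a logistic-regression state returns

    \sigma(w \cdot x + b) = \frac{1}{1 + e^{-(w \cdot x + b)}} \in (0, 1),

so after training on this separable data the first check expects scores near 1 and the second scores near 0 (hence the (0.9, 1.1) and (-0.1, 0.1) windows).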
File diff suppressed because one or more lines are too long
@@ -0,0 +1 @@
+1
File diff suppressed because one or more lines are too long
@@ -1,580 +0,0 @@
-[deleted reference file: 580 lines of 1.0 / 0.0 class labels in alternating blocks]
@@ -1,580 +0,0 @@
-[deleted reference file: 580 lines — 20 distinct predicted values, each repeated 29 times: 0.6411012851604063, 0.6882314202562039, 0.4364572659523192, 0.6093243100848463, 0.6997646999628921, 0.5422581250564131, 0.6508734063103988, 0.6985513111559237, 0.45244725477787, 0.6261675112820656, 0.7461838992404664, 0.7018563514656831, 0.36013871027066185, 0.5238855727602139, 0.6804249705341022, 0.6050764770369429, 0.6440334745784803, 0.5716205957425191, 0.5311888217657855, 0.6986271931040996]
@@ -1 +0,0 @@
-0.7692307692307693
File diff suppressed because one or more lines are too long
@@ -1 +1 @@
--66.98005053600168
+1
File diff suppressed because one or more lines are too long
@@ -1 +0,0 @@
--70.73127165094067
@ -1,22 +0,0 @@
|
||||
-- CREATE DATABASE IF NOT EXISTS test;
|
||||
-- DROP TABLE IF EXISTS test.trainset;
|
||||
-- CREATE TABLE IF NOT EXISTS test.trainset
|
||||
-- (
|
||||
-- param1 Float64, param2 Float64, param3 Float64, param4 Float64, param5 Float64, param6 Float64, param7 Float64, param8 Float64, param9 Float64, param10 Float64, param11 Float64, param12 Float64, param13 Float64, param14 Float64, param15 Float64, param16 Float64, param17 Float64, param18 Float64, param19 Float64, param20 Float64, param21 Float64, param22 Float64, param23 Float64, param24 Float64, param25 Float64, param26 Float64, param27 Float64, param28 Float64, param29 Float64, param30 Float64, param31 Float64, param32 Float64, param33 Float64, param34 Float64, param35 Float64, param36 Float64, param37 Float64, param38 Float64, param39 Float64, param40 Float64, param41 Float64, param42 Float64, param43 Float64, param44 Float64, param45 Float64, param46 Float64, param47 Float64, param48 Float64, param49 Float64, param50 Float64, param51 Float64, param52 Float64, param53 Float64, param54 Float64, param55 Float64, param56 Float64, param57 Float64, param58 Float64, param59 Float64, param60 Float64, param61 Float64, param62 Float64, param63 Float64, param64 Float64, param65 Float64, param66 Float64, param67 Float64, param68 Float64, param69 Float64, param70 Float64, param71 Float64, param72 Float64, param73 Float64, param74 Float64, param75 Float64, param76 Float64, param77 Float64, param78 Float64, param79 Float64, param80 Float64, param81 Float64, param82 Float64, param83 Float64, param84 Float64, param85 Float64, param86 Float64, param87 Float64, param88 Float64, param89 Float64, param90 Float64, param91 Float64, param92 Float64, param93 Float64, param94 Float64, param95 Float64, param96 Float64, param97 Float64, param98 Float64, param99 Float64, param100 Float64, param101 Float64, param102 Float64, param103 Float64, param104 Float64, param105 Float64, param106 Float64, param107 Float64, param108 Float64, param109 Float64, param110 Float64, param111 Float64, param112 Float64, param113 Float64, param114 Float64, param115 Float64, param116 Float64, param117 Float64, param118 Float64, param119 Float64, param120 Float64, param121 Float64, param122 Float64, param123 Float64, param124 Float64, param125 Float64, param126 Float64, param127 Float64, param128 Float64, param129 Float64, param130 Float64, param131 Float64, param132 Float64, param133 Float64, param134 Float64, param135 Float64, param136 Float64, param137 Float64, param138 Float64, param139 Float64, param140 Float64, param141 Float64, param142 Float64, param143 Float64, param144 Float64, param145 Float64, param146 Float64, param147 Float64, param148 Float64, param149 Float64, param150 Float64, param151 Float64, param152 Float64, param153 Float64, param154 Float64, param155 Float64, param156 Float64, param157 Float64, param158 Float64, param159 Float64, param160 Float64, param161 Float64, param162 Float64, param163 Float64, param164 Float64, param165 Float64, param166 Float64, param167 Float64, param168 Float64, param169 Float64, param170 Float64, param171 Float64, param172 Float64, param173 Float64, param174 Float64, param175 Float64, param176 Float64, param177 Float64, param178 Float64, param179 Float64, param180 Float64, param181 Float64, param182 Float64, param183 Float64, target Float64
-- ) ENGINE = Memory;
-- DROP TABLE IF EXISTS test.testset;
-- CREATE TABLE IF NOT EXISTS test.testset
-- (
-- param1 Float64, param2 Float64, param3 Float64, param4 Float64, param5 Float64, param6 Float64, param7 Float64, param8 Float64, param9 Float64, param10 Float64, param11 Float64, param12 Float64, param13 Float64, param14 Float64, param15 Float64, param16 Float64, param17 Float64, param18 Float64, param19 Float64, param20 Float64, param21 Float64, param22 Float64, param23 Float64, param24 Float64, param25 Float64, param26 Float64, param27 Float64, param28 Float64, param29 Float64, param30 Float64, param31 Float64, param32 Float64, param33 Float64, param34 Float64, param35 Float64, param36 Float64, param37 Float64, param38 Float64, param39 Float64, param40 Float64, param41 Float64, param42 Float64, param43 Float64, param44 Float64, param45 Float64, param46 Float64, param47 Float64, param48 Float64, param49 Float64, param50 Float64, param51 Float64, param52 Float64, param53 Float64, param54 Float64, param55 Float64, param56 Float64, param57 Float64, param58 Float64, param59 Float64, param60 Float64, param61 Float64, param62 Float64, param63 Float64, param64 Float64, param65 Float64, param66 Float64, param67 Float64, param68 Float64, param69 Float64, param70 Float64, param71 Float64, param72 Float64, param73 Float64, param74 Float64, param75 Float64, param76 Float64, param77 Float64, param78 Float64, param79 Float64, param80 Float64, param81 Float64, param82 Float64, param83 Float64, param84 Float64, param85 Float64, param86 Float64, param87 Float64, param88 Float64, param89 Float64, param90 Float64, param91 Float64, param92 Float64, param93 Float64, param94 Float64, param95 Float64, param96 Float64, param97 Float64, param98 Float64, param99 Float64, param100 Float64, param101 Float64, param102 Float64, param103 Float64, param104 Float64, param105 Float64, param106 Float64, param107 Float64, param108 Float64, param109 Float64, param110 Float64, param111 Float64, param112 Float64, param113 Float64, param114 Float64, param115 Float64, param116 Float64, param117 Float64, param118 Float64, param119 Float64, param120 Float64, param121 Float64, param122 Float64, param123 Float64, param124 Float64, param125 Float64, param126 Float64, param127 Float64, param128 Float64, param129 Float64, param130 Float64, param131 Float64, param132 Float64, param133 Float64, param134 Float64, param135 Float64, param136 Float64, param137 Float64, param138 Float64, param139 Float64, param140 Float64, param141 Float64, param142 Float64, param143 Float64, param144 Float64, param145 Float64, param146 Float64, param147 Float64, param148 Float64, param149 Float64, param150 Float64, param151 Float64, param152 Float64, param153 Float64, param154 Float64, param155 Float64, param156 Float64, param157 Float64, param158 Float64, param159 Float64, param160 Float64, param161 Float64, param162 Float64, param163 Float64, param164 Float64, param165 Float64, param166 Float64, param167 Float64, param168 Float64, param169 Float64, param170 Float64, param171 Float64, param172 Float64, param173 Float64, param174 Float64, param175 Float64, param176 Float64, param177 Float64, param178 Float64, param179 Float64, param180 Float64, param181 Float64, param182 Float64, param183 Float64
-- ) ENGINE = Memory;
SET send_logs_level = 'trace';
-- SET log_queries = 1;
-- SET max_threads = 4;
-- drop table if exists test.model;
-- create table if not exists test.model engine = Memory as select LinearRegressionState(0.0000001, 4, 2.0)(target, param1, param2, param3, param4, param5, param6, param7, param8, param9, param10, param11, param12, param13, param14, param15, param16, param17, param18, param19, param20, param21, param22, param23, param24, param25, param26, param27, param28, param29, param30, param31, param32, param33, param34, param35, param36, param37, param38, param39, param40, param41, param42, param43, param44, param45, param46, param47, param48, param49, param50, param51, param52, param53, param54, param55, param56, param57, param58, param59, param60, param61, param62, param63, param64, param65, param66, param67, param68, param69, param70, param71, param72, param73, param74, param75, param76, param77, param78, param79, param80, param81, param82, param83, param84, param85, param86, param87, param88, param89, param90, param91, param92, param93, param94, param95, param96, param97, param98, param99, param100, param101, param102, param103, param104, param105, param106, param107, param108, param109, param110, param111, param112, param113, param114, param115, param116, param117, param118, param119, param120, param121, param122, param123, param124, param125, param126, param127, param128, param129, param130, param131, param132, param133, param134, param135, param136, param137, param138, param139, param140, param141, param142, param143, param144, param145, param146, param147, param148, param149, param150, param151, param152, param153, param154, param155, param156, param157, param158, param159, param160, param161, param162, param163, param164, param165, param166, param167, param168, param169, param170, param171, param172, param173, param174, param175, param176, param177, param178, param179, param180, param181, param182, param183) as state from test.trainset;
select LinearRegressionState(0.0000001, 4, 2.0)(target, param1, param2, param3, param4, param5, param6, param7, param8, param9, param10, param11, param12, param13, param14, param15, param16, param17, param18, param19, param20, param21, param22, param23, param24, param25, param26, param27, param28, param29, param30, param31, param32, param33, param34, param35, param36, param37, param38, param39, param40, param41, param42, param43, param44, param45, param46, param47, param48, param49, param50, param51, param52, param53, param54, param55, param56, param57, param58, param59, param60, param61, param62, param63, param64, param65, param66, param67, param68, param69, param70, param71, param72, param73, param74, param75, param76, param77, param78, param79, param80, param81, param82, param83, param84, param85, param86, param87, param88, param89, param90, param91, param92, param93, param94, param95, param96, param97, param98, param99, param100, param101, param102, param103, param104, param105, param106, param107, param108, param109, param110, param111, param112, param113, param114, param115, param116, param117, param118, param119, param120, param121, param122, param123, param124, param125, param126, param127, param128, param129, param130, param131, param132, param133, param134, param135, param136, param137, param138, param139, param140, param141, param142, param143, param144, param145, param146, param147, param148, param149, param150, param151, param152, param153, param154, param155, param156, param157, param158, param159, param160, param161, param162, param163, param164, param165, param166, param167, param168, param169, param170, param171, param172, param173, param174, param175, param176, param177, param178, param179, param180, param181, param182, param183) from test.trainset;
-- with (select state from test.model) as model select evalMLMethod(model, param1, param2, param3, param4, param5, param6, param7, param8, param9, param10, param11, param12, param13, param14, param15, param16, param17, param18, param19, param20, param21, param22, param23, param24, param25, param26, param27, param28, param29, param30, param31, param32, param33, param34, param35, param36, param37, param38, param39, param40, param41, param42, param43, param44, param45, param46, param47, param48, param49, param50, param51, param52, param53, param54, param55, param56, param57, param58, param59, param60, param61, param62, param63, param64, param65, param66, param67, param68, param69, param70, param71, param72, param73, param74, param75, param76, param77, param78, param79, param80, param81, param82, param83, param84, param85, param86, param87, param88, param89, param90, param91, param92, param93, param94, param95, param96, param97, param98, param99, param100, param101, param102, param103, param104, param105, param106, param107, param108, param109, param110, param111, param112, param113, param114, param115, param116, param117, param118, param119, param120, param121, param122, param123, param124, param125, param126, param127, param128, param129, param130, param131, param132, param133, param134, param135, param136, param137, param138, param139, param140, param141, param142, param143, param144, param145, param146, param147, param148, param149, param150, param151, param152, param153, param154, param155, param156, param157, param158, param159, param160, param161, param162, param163, param164, param165, param166, param167, param168, param169, param170, param171, param172, param173, param174, param175, param176, param177, param178, param179, param180, param181, param182, param183) from test.testset;
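
The deleted test above sketches the intended end-to-end pattern: aggregate a LinearRegressionState over the training rows into a Memory table, then feed the stored state to evalMLMethod on a held-out set. Below is a minimal sketch of that pattern, not part of the diff: the table and column names (test.toy_train, test.toy_test, x1, x2, y) are hypothetical, and the parameter layout (learning_rate, l2_reg_coef, batch_size, weights_updater, with updater codes 1 = SGD, 2 = Momentum, 3 = Nesterov) is assumed from the factory code in this commit.

-- Train: fold the training rows into a single serialized model state (Momentum updater, batch size 1).
DROP TABLE IF EXISTS test.toy_model;
CREATE TABLE test.toy_model ENGINE = Memory AS
    SELECT LinearRegressionState(0.01, 0.01, 1, 2.0)(y, x1, x2) AS state
    FROM test.toy_train;

-- Apply: pass the stored state to evalMLMethod to predict for unseen rows.
WITH (SELECT state FROM test.toy_model) AS model
SELECT evalMLMethod(model, x1, x2) FROM test.toy_test;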