提交 de8db1ab · 作者：sunsuodong

support leaky relu

上级 8e5d4445
......@@ -142,6 +142,7 @@ table SoftMax {
// Unified activation op table; the concrete function (e.g. RELU, LEAKY_RELU)
// is selected by `type`.
table Activation {
// ActivationType enum value; 0 is the schema default — presumably the first
// enum member, confirm against the ActivationType definition.
type: ActivationType = 0;
// Slope coefficient consumed by LEAKY_RELU (Caffe's negative_slope);
// default 0.2.
alpha: float = 0.2;
}
table ActivationGrad {
type: ActivationGradType = 0;
......
......@@ -487,6 +487,7 @@ OpParameter *PopulateActivationParameter(const lite::Primitive *primitive) {
}
auto activation = primitive->Value()->value_as_Activation();
act_param->type_ = static_cast<int>(activation->type());
act_param->alpha_ = activation->alpha();
return reinterpret_cast<OpParameter *>(act_param);
}
......
......@@ -24,7 +24,7 @@
// Runtime parameter block for Activation kernels (mirrors the schema's
// `Activation` table: type + alpha).
struct ActivationParameter {
  OpParameter op_parameter_;
  // ActivationType as an int (RELU, LEAKY_RELU, ...), copied from the schema.
  int type_;
  // Negative slope used by leaky relu; default 0.2 matches the schema default
  // (the rendered diff showed both the old {0.01} and new {0.2} members —
  // keeping both would be a duplicate-member compile error, so only the new
  // default is kept).
  float alpha_{0.2};
};
inline int Relu(const float *src, int length, float *dst) {
......
......@@ -25,22 +25,18 @@ STATUS CaffeReluParser::Parse(const caffe::LayerParameter &proto,
std::vector<schema::TensorT *> *weightVec) {
  // Map Caffe ReLU / LeakyReLU onto the unified Activation op:
  //   relu:      negative_slope == 0 (or absent) -> ActivationType_RELU
  //   leakyrelu: negative_slope != 0             -> ActivationType_LEAKY_RELU
  std::unique_ptr<schema::ActivationT> attr(new schema::ActivationT());
  attr->type = schema::ActivationType_RELU;
  if (proto.has_relu_param() && proto.relu_param().has_negative_slope()) {
    float negative_slope = proto.relu_param().negative_slope();
    if (0 != negative_slope) {
      attr->type = schema::ActivationType_LEAKY_RELU;
      attr->alpha = negative_slope;
    }
  }
  // Attach the attribute only after every field is set: release() hands
  // ownership to the primitive, so `attr` must not be written afterwards.
  // (The rendered diff had both the old early attachment and the removed
  // LeakyReLUT branch merged in, which would have dereferenced `attr` after
  // release(); this is the coherent post-commit version.)
  op->primitive = std::make_unique<schema::PrimitiveT>();
  op->primitive->value.value = attr.release();
  op->primitive->value.type = schema::PrimitiveType_Activation;
  return RET_OK;
}
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册