Commit de8db1ab authored by sunsuodong

support leaky relu

Parent 8e5d4445
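Reviewer note: leaky ReLU is ReLU with a small non-zero slope `alpha` on the negative side, i.e. f(x) = x for x > 0 and f(x) = alpha * x otherwise. A minimal sketch of the computation this commit wires up, with the function name and signature modeled on the existing `Relu` helper rather than taken from the actual kernel:

```cpp
// Illustrative only: leaky ReLU with negative-side slope `alpha`, mirroring
// the style of `inline int Relu(const float *src, int length, float *dst)`.
inline int LeakyRelu(const float *src, int length, float alpha, float *dst) {
  for (int i = 0; i < length; ++i) {
    dst[i] = src[i] > 0.0f ? src[i] : alpha * src[i];  // slope alpha below zero
  }
  return 0;
}
```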
@@ -142,6 +142,7 @@ table SoftMax {
 table Activation {
     type: ActivationType = 0;
+    alpha: float = 0.2;
 }
 table ActivationGrad {
     type: ActivationGradType = 0;
...
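Note on the schema hunk above: FlatBuffers does not serialize a scalar field that equals its declared default, so models written before this change still read back with `alpha() == 0.2f`. The flatc-generated object API should gain roughly the following field (a sketch based on flatc's usual output conventions, not the actual generated header):

```cpp
// Rough shape of the flatc-generated object-API struct after this change.
struct ActivationT : public flatbuffers::NativeTable {
  ActivationType type = static_cast<ActivationType>(0);
  float alpha = 0.2f;  // new field; defaults to 0.2 when the model omits it
};
```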
@@ -487,6 +487,7 @@ OpParameter *PopulateActivationParameter(const lite::Primitive *primitive) {
   }
   auto activation = primitive->Value()->value_as_Activation();
   act_param->type_ = static_cast<int>(activation->type());
+  act_param->alpha_ = activation->alpha();
   return reinterpret_cast<OpParameter *>(act_param);
 }
...
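For context on this hunk: `value_as_Activation()` is the flatc-generated union accessor, and `activation->alpha()` returns the schema default (0.2) when the model file does not store the field, so older models keep working. A sketch of how the surrounding function likely reads after the change; the lines outside the hunk, including the allocation style, are assumptions:

```cpp
// Hedged reconstruction; only the lines marked in the hunk are confirmed.
OpParameter *PopulateActivationParameter(const lite::Primitive *primitive) {
  auto *act_param = new (std::nothrow) ActivationParameter();  // alloc style assumed
  if (act_param == nullptr) {
    return nullptr;
  }
  auto activation = primitive->Value()->value_as_Activation();
  act_param->type_ = static_cast<int>(activation->type());
  act_param->alpha_ = activation->alpha();  // new: forward alpha to the kernel
  return reinterpret_cast<OpParameter *>(act_param);
}
```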
@@ -24,7 +24,7 @@
 struct ActivationParameter {
   OpParameter op_parameter_;
   int type_;
-  float alpha_{0.01};
+  float alpha_{0.2};
 };

 inline int Relu(const float *src, int length, float *dst) {
...
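The in-struct default moves from 0.01 to 0.2 so that the runtime parameter and the schema default agree: a model that never sets `alpha` now behaves identically whether the value comes from the FlatBuffer or from the struct initializer. A quick illustration, assuming only the struct definition above:

```cpp
ActivationParameter param{};  // value-initialized
// param.alpha_ == 0.2f, matching the schema default `alpha: float = 0.2`
// and the value copied in PopulateActivationParameter via activation->alpha().
```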
@@ -25,22 +25,18 @@ STATUS CaffeReluParser::Parse(const caffe::LayerParameter &proto,
                               std::vector<schema::TensorT *> *weightVec) {
   std::unique_ptr<schema::ActivationT> attr(new schema::ActivationT());
   attr->type = schema::ActivationType_RELU;
-  op->primitive = std::make_unique<schema::PrimitiveT>();
-  op->primitive->value.value = attr.release();
-  op->primitive->value.type = schema::PrimitiveType_Activation;
   // relu: negative_slope = 0, no parameter;
   // leakyrelu: negative_slope != 0;
   if (proto.has_relu_param() && proto.relu_param().has_negative_slope()) {
     float negative_slope = proto.relu_param().negative_slope();
     if (0 != negative_slope) {
-      std::unique_ptr<schema::LeakyReLUT> attrLeakyReLu(new schema::LeakyReLUT());
-      attrLeakyReLu->negativeSlope = negative_slope;
-      op->primitive = std::make_unique<schema::PrimitiveT>();
-      op->primitive->value.type = schema::PrimitiveType_LeakyReLU;
-      op->primitive->value.value = attrLeakyReLu.release();
+      attr->type = schema::ActivationType_LEAKY_RELU;
+      attr->alpha = negative_slope;
     }
   }
+  op->primitive = std::make_unique<schema::PrimitiveT>();
+  op->primitive->value.value = attr.release();
+  op->primitive->value.type = schema::PrimitiveType_Activation;
   return RET_OK;
 }
...
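The parser rewrite above also fixes a structural quirk: the old code built an Activation primitive up front and then, on the leaky path, released a second, separate LeakyReLU primitive into the node; the new code decides the type and alpha first and builds a single Activation primitive once. A standalone restatement of the new mapping, with the proto accessors taken from caffe.proto and the wrapping function purely hypothetical:

```cpp
// Illustrative restatement of the new mapping, not the parser itself.
void ExampleMapping() {
  caffe::LayerParameter proto;
  proto.mutable_relu_param()->set_negative_slope(0.1f);  // leaky case

  schema::ActivationT attr;
  attr.type = schema::ActivationType_RELU;
  if (proto.has_relu_param() && proto.relu_param().has_negative_slope() &&
      proto.relu_param().negative_slope() != 0.0f) {
    attr.type = schema::ActivationType_LEAKY_RELU;     // leaky path
    attr.alpha = proto.relu_param().negative_slope();  // 0.1f here
  }
  // Either way, the node is emitted as one PrimitiveType_Activation.
}
```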