提交 4436ba0c 编写于 作者: Z zhouxiao-coder

elu: Optimize gradient calculation; add more comments

上级 a815d6ab
...@@ -180,16 +180,18 @@ class ELUOpMaker : public framework::OpProtoAndCheckerMaker { ...@@ -180,16 +180,18 @@ class ELUOpMaker : public framework::OpProtoAndCheckerMaker {
// Constructor: registers the ELU operator's inputs, outputs, attributes,
// and human-readable documentation with the op registry.
// NOTE(review): reconstructed from a garbled side-by-side diff; this is the
// post-commit (new) version of the code.
ELUOpMaker(framework::OpProto *proto, framework::OpAttrChecker *op_checker)
    : OpProtoAndCheckerMaker(proto, op_checker) {
  AddInput("X",
           "(Tensor) The input of ELU operator, it shouldn't be empty. Input "
           "is flattened and treated as a 1D array.");
  AddOutput("Y",
            "(Tensor) The output of ELU operator. It has the same shape as "
            "the input.");
  // alpha controls the saturation value for negative inputs; default 1.0.
  AddAttr<AttrType>(
      "alpha", "(float, default 1.0) Alpha value in the elu formulation.")
      .SetDefault(static_cast<AttrType>(1.));
  AddComment(R"DOC(
ELU activation operator. It applies this element-wise computation on
the input: f(x) = max(0, x) + min(0, alpha * (exp(x) - 1)).
Check .. _Link: https://arxiv.org/abs/1511.07289 for more details.)DOC");
}
}; };
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册