From 4436ba0c56d105b0c1305a739158fdc08258f7a9 Mon Sep 17 00:00:00 2001
From: zhouxiao-coder
Date: Fri, 29 Sep 2017 17:52:18 +0800
Subject: [PATCH] elu: Optimize gradient calculation;Add more comments

---
 paddle/operators/activation_op.cc | 20 +++++++++++---------
 1 file changed, 11 insertions(+), 9 deletions(-)

diff --git a/paddle/operators/activation_op.cc b/paddle/operators/activation_op.cc
index e83666c9f92..7d086ac5dfb 100644
--- a/paddle/operators/activation_op.cc
+++ b/paddle/operators/activation_op.cc
@@ -180,16 +180,18 @@ class ELUOpMaker : public framework::OpProtoAndCheckerMaker {
   ELUOpMaker(framework::OpProto *proto, framework::OpAttrChecker *op_checker)
       : OpProtoAndCheckerMaker(proto, op_checker) {
     AddInput("X",
-             "Input of ELU operator, it shouldn't be empty. Input is flattened "
-             "and treated as a 1D array.");
-    AddOutput("Y", "Output of ELU operator, has same shape as the input.");
-    AddComment(
-        "ELU activation operator. It applies this element-wise computation on "
-        "the input: f(x) = max(0, x) + min(0, alpha * (exp(x) - 1))."
-        "Check .. _Link: https://arxiv.org/abs/1511.07289 for more details");
-    AddAttr<AttrType>("alpha",
-                      "alpha value in the elu formulation, default to 1.")
+             "(Tensor) The input of ELU operator, it shouldn't be empty. Input "
+             "is flattened and treated as a 1D array.");
+    AddOutput("Y",
+              "(Tensor) The output of ELU operator. It has the same shape as "
+              "the input.");
+    AddAttr<AttrType>(
+        "alpha", "(float, default 1.0) Alpha value in the elu formulation.")
         .SetDefault(static_cast<AttrType>(1.));
+    AddComment(R"DOC(
+ELU activation operator. It applies this element-wise computation on
+the input: f(x) = max(0, x) + min(0, alpha * (exp(x) - 1)).
+Check .. _Link: https://arxiv.org/abs/1511.07289 for more details.)DOC");
   }
 };
 
-- 
GitLab
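
Note: the subject line mentions optimizing the gradient calculation, but the hunk above only touches the operator's documentation strings; the kernel change itself is not shown. As a rough illustration of the documented formula, f(x) = max(0, x) + min(0, alpha * (exp(x) - 1)), and of one common way such a gradient optimization works, here is a minimal, self-contained C++ sketch. It is NOT the Paddle kernel API; all function names below (EluForward, EluBackward) are illustrative. The key observation: for x <= 0, y = alpha * (exp(x) - 1), so f'(x) = alpha * exp(x) = y + alpha, and the backward pass can reuse the forward output instead of recomputing exp(x).

    // Illustrative sketch -- not the Paddle implementation.
    #include <cmath>
    #include <cstdio>
    #include <vector>

    // Forward: f(x) = max(0, x) + min(0, alpha * (exp(x) - 1))
    std::vector<float> EluForward(const std::vector<float> &x, float alpha) {
      std::vector<float> y(x.size());
      for (size_t i = 0; i < x.size(); ++i) {
        y[i] = x[i] > 0.f ? x[i] : alpha * (std::exp(x[i]) - 1.f);
      }
      return y;
    }

    // Backward: for x > 0, f'(x) = 1; for x <= 0, f'(x) = alpha * exp(x),
    // which equals y + alpha, so no extra exp() is needed once y is known.
    std::vector<float> EluBackward(const std::vector<float> &x,
                                   const std::vector<float> &y,
                                   const std::vector<float> &dy, float alpha) {
      std::vector<float> dx(x.size());
      for (size_t i = 0; i < x.size(); ++i) {
        dx[i] = dy[i] * (x[i] > 0.f ? 1.f : y[i] + alpha);
      }
      return dx;
    }

    int main() {
      const float alpha = 1.f;  // matches the op's documented default
      std::vector<float> x = {-2.f, -0.5f, 0.f, 1.5f};
      std::vector<float> y = EluForward(x, alpha);
      std::vector<float> dy(x.size(), 1.f);  // upstream gradient of ones
      std::vector<float> dx = EluBackward(x, y, dy, alpha);
      for (size_t i = 0; i < x.size(); ++i) {
        std::printf("x=% .2f  y=% .4f  dx=% .4f\n", x[i], y[i], dx[i]);
      }
      return 0;
    }

With the default alpha = 1 the derivative is also continuous at x = 0 (both branches give 1). Reusing the forward output in the backward pass is a standard trick for exponential activations and is consistent with the "Optimize gradient calculation" wording of the commit, though the exact change made to the Paddle gradient functor is not visible in this hunk.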