/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include <memory>
#include <string>
#include <unordered_map>

#include "paddle/fluid/framework/infershape_utils.h"
#include "paddle/fluid/framework/op_registry.h"
#include "paddle/fluid/framework/operator.h"
#include "paddle/phi/infermeta/unary.h"

namespace paddle {
namespace operators {

class SeluOp : public framework::OperatorWithKernel {
 public:
  SeluOp(const std::string &type, const framework::VariableNameMap &inputs,
         const framework::VariableNameMap &outputs,
         const framework::AttributeMap &attrs)
      : OperatorWithKernel(type, inputs, outputs, attrs) {}

 protected:
  framework::OpKernelType GetExpectedKernelType(
      const framework::ExecutionContext &ctx) const override {
    return framework::OpKernelType(
        OperatorWithKernel::IndicateVarDataType(ctx, "X"), ctx.GetPlace());
  }
};

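// Out shares the data type and variable type of the input X, via the
// X -> Out mapping returned by GetInputOutputWithSameType().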
class SeluOpInferVarType : public framework::PassInDtypeAndVarTypeToOutput {
 protected:
  std::unordered_map<std::string, std::string> &GetInputOutputWithSameType()
      const override {
    static std::unordered_map<std::string, std::string> m{{"X", /*->*/ "Out"}};
    return m;
  }
};

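// Declares the op's interface: input X, output Out, and the scale/alpha
// attributes whose default values come from the SELU paper
// (https://arxiv.org/abs/1706.02515).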
class SeluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "The input tensor of selu operator.");
    AddOutput("Out", "The output tensor of selu operator.");
    AddAttr<float>("scale",
                   "(float) the default value is 1.0507~. For more "
                   "information about this value, please refer to:"
                   "https://arxiv.org/abs/1706.02515.")
        .SetDefault(1.0507009873554804934193349852946);
    AddAttr<float>("alpha",
                   "(float) the default value is 1.6732~. For more "
                   "information about this value, please refer to:"
                   "https://arxiv.org/abs/1706.02515.")
        .SetDefault(1.6732632423543772848170429916717);
    AddComment(R"DOC(
Selu Operator.

The equation is:
$$
f(x) = \lambda
\begin{cases}
  x, & \text{if } x > 0 \\
  \alpha e^x - \alpha, & \text{if } x \le 0
\end{cases}
$$

The input `X` can carry the LoD (Level of Details) information,
or not. And the output shares the LoD information with input `X`.
)DOC");
  }
};

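// Builds the selu_grad op description for both static graph (OpDesc) and
// imperative (OpBase) modes: it consumes the forward output Out and Out@GRAD,
// produces X@GRAD, and copies the forward attributes.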
template <typename T>
class SeluGradMaker : public framework::SingleGradOpMaker<T> {
 public:
  using framework::SingleGradOpMaker<T>::SingleGradOpMaker;

  void Apply(GradOpPtr<T> grad_op) const override {
    grad_op->SetType("selu_grad");
    grad_op->SetInput("Out", this->Output("Out"));
    grad_op->SetInput(framework::GradVarName("Out"), this->OutputGrad("Out"));
    grad_op->SetOutput(framework::GradVarName("X"), this->InputGrad("X"));
    grad_op->SetAttrMap(this->Attrs());
  }
};

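// Shape inference for selu_grad: X@GRAD takes the dimensions of the forward
// output Out, and the kernel data type is deduced from Out.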
class SeluGradOp : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

  void InferShape(framework::InferShapeContext *ctx) const override {
    OP_INOUT_CHECK(ctx->HasInput(framework::GradVarName("Out")), "Input",
                   "Out@GRAD", "selu_grad");
    OP_INOUT_CHECK(ctx->HasInput("Out"), "Input", "Out", "selu_grad");
    auto x_grad_name = framework::GradVarName("X");
    ctx->SetOutputDim(x_grad_name, ctx->GetInputDim("Out"));
  }

 protected:
  framework::OpKernelType GetExpectedKernelType(
      const framework::ExecutionContext &ctx) const override {
    return framework::OpKernelType(
        OperatorWithKernel::IndicateVarDataType(ctx, "Out"), ctx.GetPlace());
  }
};

}  // namespace operators
}  // namespace paddle

namespace ops = paddle::operators;

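// The forward op reuses phi::UnchangedInferMeta, so Out keeps the shape of X.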
DECLARE_INFER_SHAPE_FUNCTOR(selu, SeluInferShapeFunctor,
                            PD_INFER_META(phi::UnchangedInferMeta));

REGISTER_OPERATOR(selu, ops::SeluOp, ops::SeluOpMaker, ops::SeluOpInferVarType,
                  ops::SeluGradMaker<paddle::framework::OpDesc>,
                  ops::SeluGradMaker<paddle::imperative::OpBase>,
                  SeluInferShapeFunctor);

REGISTER_OPERATOR(selu_grad, ops::SeluGradOp);