prelu_op.cc 3.2 KB
Newer Older
Z
zchen0211 已提交
1 2
/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

L
Luo Tao 已提交
3 4 5
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
Z
zchen0211 已提交
6

L
Luo Tao 已提交
7
    http://www.apache.org/licenses/LICENSE-2.0
Z
zchen0211 已提交
8

L
Luo Tao 已提交
9 10 11 12 13
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
Z
zchen0211 已提交
14

Y
Yi Wang 已提交
15 16
#include "paddle/fluid/operators/prelu_op.h"
#include "paddle/fluid/operators/net_op.h"
Z
zchen0211 已提交
17 18 19 20

namespace paddle {
namespace operators {

Z
fix  
zchen0211 已提交
21
class PReluOp : public framework::OperatorWithKernel {
Z
zchen0211 已提交
22
 public:
Z
fix  
zchen0211 已提交
23
  PReluOp(const std::string &type, const framework::VariableNameMap &inputs,
Z
zchen0211 已提交
24 25 26 27
          const framework::VariableNameMap &outputs,
          const framework::AttributeMap &attrs)
      : OperatorWithKernel(type, inputs, outputs, attrs) {}

28
  void InferShape(framework::InferShapeContext *ctx) const override {
Q
Qiao Longfei 已提交
29 30 31 32 33 34 35
    PADDLE_ENFORCE(ctx->HasInput("X"), "Input(X) should not be null");
    PADDLE_ENFORCE(ctx->HasInput("Alpha"), "Input(Alpha) should not be null");
    PADDLE_ENFORCE(product(ctx->GetInputDim("Alpha")) == 1,
                   "Size of weight Alpha must be one.");
    PADDLE_ENFORCE(ctx->HasOutput("Out"), "Output(Out) should not be null");
    ctx->SetOutputDim("Out", ctx->GetInputDim("X"));
    ctx->ShareLoD("X", /*->*/ "Out");
Z
zchen0211 已提交
36 37 38
  }
};

Z
fix  
zchen0211 已提交
39
// Declares the inputs, outputs, and user-facing documentation of the prelu
// operator for the framework's op-proto registry.
class PReluOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  PReluOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    // NOTE: the order of the Add* calls fixes the proto's input/output slot
    // indices — do not reorder.
    AddInput("X", "The input tensor of prelu operator.");
    AddInput("Alpha", "The alpha weight of prelu operator.");
    AddOutput("Out", "The output tensor of prelu operator.");
    AddComment(R"DOC(
PRelu Operator.

The equation is:

$$
f(x) =
\begin{cases}
\alpha * x, \quad  \text{if} \ x < 0 \\
x,         \qquad  \text{if} \ x >= 0
\end{cases}
$$

The input `X` can carry the LoD (Level of Details) information,
or not. And the output shares the LoD information with input `X`.

)DOC");
  }
};

// The operator to calculate gradients of a prelu operator.
Z
fix  
zchen0211 已提交
67
class PReluGradOp : public framework::OperatorWithKernel {
Z
zchen0211 已提交
68 69 70
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

71
  void InferShape(framework::InferShapeContext *ctx) const override {
Q
Qiao Longfei 已提交
72 73 74 75 76 77
    PADDLE_ENFORCE(ctx->HasInput("X"), "Input(X) must not be null.");
    PADDLE_ENFORCE(ctx->HasInput(framework::GradVarName("Out")),
                   "Input(Out@GRAD) should not be null");
    ctx->SetOutputDim(framework::GradVarName("X"), ctx->GetInputDim("X"));
    ctx->SetOutputDim(framework::GradVarName("Alpha"),
                      ctx->GetInputDim("Alpha"));
Z
zchen0211 已提交
78 79 80 81 82 83 84 85
  }
};

}  // namespace operators
}  // namespace paddle

namespace ops = paddle::operators;

// Register the forward op, its proto maker, and the paired gradient op.
REGISTER_OP(prelu, ops::PReluOp, ops::PReluOpMaker, prelu_grad,
            ops::PReluGradOp);
// CPU kernels; only float is currently registered.
REGISTER_OP_CPU_KERNEL(
    prelu, ops::PReluKernel<paddle::platform::CPUDeviceContext, float>);
REGISTER_OP_CPU_KERNEL(
    prelu_grad,
    ops::PReluGradKernel<paddle::platform::CPUDeviceContext, float>);