/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
#include "paddle/fluid/operators/squared_l2_norm_op.h"

#include <memory>
namespace paddle {
namespace operators {

using framework::Tensor;

// Forward operator: reduces the whole input tensor X to a single scalar
// Out = sum_i X_i^2 (kernels live in squared_l2_norm_op.h / .cu).
class SquaredL2NormOp : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

  // Compile-time shape inference: verify that input X and output Out are
  // declared, then pin Out to shape {1} — the norm is a single value
  // regardless of the rank or size of X.
  void InferShape(framework::InferShapeContext* ctx) const override {
    PADDLE_ENFORCE(ctx->HasInput("X"), "Input(X) should be not null.");
    PADDLE_ENFORCE(ctx->HasOutput("Out"), "Output(Out) should be not null.");

    ctx->SetOutputDim("Out", {1});
  }
};

H
hong 已提交
36 37
template <typename T>
class SquaredL2NormGradOpMaker : public framework::SingleGradOpMaker<T> {
H
Huihuang Zheng 已提交
38
 public:
H
hong 已提交
39
  using framework::SingleGradOpMaker<T>::SingleGradOpMaker;
H
Huihuang Zheng 已提交
40 41

 protected:
H
hong 已提交
42 43
  std::unique_ptr<T> Apply() const override {
    std::unique_ptr<T> op(new T());
H
Huihuang Zheng 已提交
44 45 46

    op->SetType("squared_l2_norm_grad");

H
hong 已提交
47 48
    op->SetInput(framework::GradVarName("Out"), this->OutputGrad("Out"));
    op->SetInput("X", this->Input("X"));
H
Huihuang Zheng 已提交
49

H
hong 已提交
50
    op->SetOutput(framework::GradVarName("X"), this->InputGrad("X"));
H
Huihuang Zheng 已提交
51

H
hong 已提交
52
    op->SetAttrMap(this->Attrs());
H
Huihuang Zheng 已提交
53 54 55 56
    return op;
  }
};

57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73
class SquaredL2NormGradOp : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

  void InferShape(framework::InferShapeContext* ctx) const override {
    PADDLE_ENFORCE(ctx->HasInput("X"), "Input(X) should be not null.");
    PADDLE_ENFORCE(ctx->HasInput(framework::GradVarName("Out")),
                   "Input(Out@GRAD) should be not null.");
    PADDLE_ENFORCE(ctx->HasOutput(framework::GradVarName("X")),
                   "Output(X@GRAD) should be not null.");

    ctx->SetOutputDim(framework::GradVarName("X"), ctx->GetInputDim("X"));
  }
};

// Declares the operator's public interface (proto): one tensor input X,
// one scalar output Out, and the user-facing documentation string shown
// in the generated API docs.
class SquaredL2NormOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "(Tensor) The input of squared_l2_norm op.");
    AddOutput("Out", "(Scalar) The output of squared_l2_norm op.");
    AddComment(R"DOC(
SquaredL2Norm Operator.

Computes the squared L2 norm of a tensor.

$$Out = \sum_{i} X_{i}^2$$

)DOC");
  }
};

}  // namespace operators
}  // namespace paddle

namespace ops = paddle::operators;
Y
Yang Yang 已提交
92
REGISTER_OPERATOR(squared_l2_norm, ops::SquaredL2NormOp,
H
hong 已提交
93 94 95
                  ops::SquaredL2NormOpMaker,
                  ops::SquaredL2NormGradOpMaker<paddle::framework::OpDesc>,
                  ops::SquaredL2NormGradOpMaker<paddle::imperative::OpBase>);
96
REGISTER_OPERATOR(squared_l2_norm_grad, ops::SquaredL2NormGradOp);
97 98
REGISTER_OP_CPU_KERNEL(
    squared_l2_norm,
Q
QI JUN 已提交
99
    ops::SquaredL2NormKernel<paddle::platform::CPUDeviceContext, float>);
100 101
REGISTER_OP_CPU_KERNEL(
    squared_l2_norm_grad,
Q
QI JUN 已提交
102
    ops::SquaredL2NormGradKernel<paddle::platform::CPUDeviceContext, float>);