squared_l2_norm_op.cc
/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/operators/squared_l2_norm_op.h"

#include <memory>

namespace paddle {
namespace operators {

using framework::Tensor;

class SquaredL2NormOp : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

  void InferShape(framework::InferShapeContext* ctx) const override {
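    // Out = sum_i X_i^2 collapses the whole input into a single value, so the
    // output is always a 1-element tensor regardless of the shape of X.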
    OP_INOUT_CHECK(ctx->HasInput("X"), "Input", "X", "SquaredL2NormOp");
    OP_INOUT_CHECK(ctx->HasOutput("Out"), "Output", "Out", "SquaredL2NormOp");

    ctx->SetOutputDim("Out", {1});
  }
};

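// Describes how to build the backward op: squared_l2_norm_grad consumes the
// forward input X together with Out@GRAD and produces X@GRAD. Since
// Out = sum_i X_i^2, the analytic gradient is dOut/dX_i = 2 * X_i, i.e.
// X@GRAD = 2 * X * Out@GRAD.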
template <typename T>
class SquaredL2NormGradOpMaker : public framework::SingleGradOpMaker<T> {
 public:
  using framework::SingleGradOpMaker<T>::SingleGradOpMaker;

 protected:
  void Apply(GradOpPtr<T> op) const override {
    op->SetType("squared_l2_norm_grad");

    op->SetInput(framework::GradVarName("Out"), this->OutputGrad("Out"));
    op->SetInput("X", this->Input("X"));

    op->SetOutput(framework::GradVarName("X"), this->InputGrad("X"));

    op->SetAttrMap(this->Attrs());
  }
};

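// Shape inference for the backward op: X@GRAD has exactly the same shape as X.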
class SquaredL2NormGradOp : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

  void InferShape(framework::InferShapeContext* ctx) const override {
    OP_INOUT_CHECK(ctx->HasInput("X"), "Input", "X", "SquaredL2NormGradOp");
    OP_INOUT_CHECK(ctx->HasInput(framework::GradVarName("Out")), "Input",
                   "Out@GRAD", "SquaredL2NormGradOp");
    OP_INOUT_CHECK(ctx->HasOutput(framework::GradVarName("X")), "Output",
                   "X@GRAD", "SquaredL2NormGradOp");

    ctx->SetOutputDim(framework::GradVarName("X"), ctx->GetInputDim("X"));
  }
};

class SquaredL2NormOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "(Tensor) The input of squared_l2_norm op.");
    AddOutput("Out", "(Scalar) The output of squared_l2_norm op.");
    AddComment(R"DOC(
SquaredL2Norm Operator.

Computes the squared L2 norm of a tensor.

$$Out = \sum_{i} X_{i}^2$$

)DOC");
  }
};

}  // namespace operators
}  // namespace paddle

namespace ops = paddle::operators;
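// Register the forward op with its proto maker and with grad-op makers for
// both static-graph (OpDesc) and imperative (OpBase) modes, then register the
// backward op and the float CPU kernels.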
REGISTER_OPERATOR(squared_l2_norm, ops::SquaredL2NormOp,
                  ops::SquaredL2NormOpMaker,
                  ops::SquaredL2NormGradOpMaker<paddle::framework::OpDesc>,
                  ops::SquaredL2NormGradOpMaker<paddle::imperative::OpBase>);
REGISTER_OPERATOR(squared_l2_norm_grad, ops::SquaredL2NormGradOp);
REGISTER_OP_CPU_KERNEL(
    squared_l2_norm,
    ops::SquaredL2NormKernel<paddle::platform::CPUDeviceContext, float>);
REGISTER_OP_CPU_KERNEL(
    squared_l2_norm_grad,
    ops::SquaredL2NormGradKernel<paddle::platform::CPUDeviceContext, float>);