/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/operators/clip_by_norm_op.h"

namespace paddle {
namespace operators {

// Shape-inference definition for the clip_by_norm operator.
// Validates the presence of input "X" and output "Out", checks that the
// "max_norm" attribute is positive, and propagates X's shape (and LoD)
// to Out — the output always has the same shape as the input.
class ClipByNormOp : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

 protected:
  // Enforces operator invariants at compile (graph-build) time:
  //  - X and Out must be wired up,
  //  - max_norm must be strictly positive (a non-positive norm bound is
  //    meaningless for L2-norm clipping).
  // Then mirrors X's dims onto Out and shares X's LoD with Out.
  void InferShape(framework::InferShapeContext* ctx) const override {
    PADDLE_ENFORCE(ctx->HasInput("X"),
                   "Input(X) of ClipByNormOp should not be null.");
    PADDLE_ENFORCE(ctx->HasOutput("Out"),
                   "Output(Out) of ClipByNormOp should not be null.");
    auto max_norm = ctx->Attrs().Get<float>("max_norm");
    PADDLE_ENFORCE_GT(max_norm, 0, "max_norm should be greater than 0.");
    auto x_dims = ctx->GetInputDim("X");
    ctx->SetOutputDim("Out", x_dims);
    ctx->ShareLoD("X", /*->*/ "Out");
  }
};

// Proto/attribute maker for clip_by_norm: declares the operator's inputs,
// outputs, attributes, and user-facing documentation string.
class ClipByNormOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  ClipByNormOpMaker(OpProto* proto, OpAttrChecker* op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X",
             "(Tensor) The input of clip_by_norm op."
             "The number of dimensions must be between [1, 9].");
    AddOutput("Out",
              "(Tensor) The output of clip_by_norm op with shape as input(X)");
    // The norm bound; InferShape enforces max_norm > 0.
    AddAttr<float>("max_norm", "(float) The maximum norm value.");
    AddComment(R"DOC(
ClipByNorm Operator.

This operator limits the L2 norm of the input $X$ within $max\_norm$.
If the L2 norm of $X$ is less than or equal to $max\_norm$, $Out$ will be
the same as $X$. If the L2 norm of $X$ is greater than $max\_norm$, $X$ will
be linearly scaled to make the L2 norm of $Out$ equal to $max\_norm$, as
shown in the following formula:

$$
Out = \frac{max\_norm * X}{norm(X)},
$$

where $norm(X)$ represents the L2 norm of $X$.
)DOC");
  }
};

}  // namespace operators
}  // namespace paddle

namespace ops = paddle::operators;
W
wwhu 已提交
70
REGISTER_OP_WITHOUT_GRADIENT(clip_by_norm, ops::ClipByNormOp,
W
fix CI  
wwhu 已提交
71
                             ops::ClipByNormOpMaker);
W
wwhu 已提交
72
REGISTER_OP_CPU_KERNEL(
Q
QI JUN 已提交
73 74
    clip_by_norm,
    ops::ClipByNormKernel<paddle::platform::CPUDeviceContext, float>);