/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License. */

#include "paddle/operators/clip_by_norm_op.h"

namespace paddle {
namespace operators {

class ClipByNormOp : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

 protected:
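  // Shape inference: Input(X) and Output(Out) must be present, the max_norm
  // attribute must be positive, and Out gets the same dimensions and LoD as X.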
  void InferShape(framework::InferShapeContext* ctx) const override {
    PADDLE_ENFORCE(ctx->HasInput("X"),
                   "Input(X) of ClipByNormOp should not be null.");
    PADDLE_ENFORCE(ctx->HasOutput("Out"),
                   "Output(Out) of ClipByNormOp should not be null.");
    auto max_norm = ctx->Attrs().Get<float>("max_norm");
    PADDLE_ENFORCE_GT(max_norm, 0, "max_norm should be greater than 0.");
    auto x_dims = ctx->GetInputDim("X");
    ctx->SetOutputDim("Out", x_dims);
    ctx->ShareLoD("X", /*->*/ "Out");
  }
};

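// Declares the operator interface: input X, output Out, the float attribute
// max_norm, and the user-facing documentation below.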
class ClipByNormOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  ClipByNormOpMaker(framework::OpProto* proto,
                    framework::OpAttrChecker* op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X",
             "(Tensor) The input of clip_by_norm op."
             "The number of dimensions must be between [1, 9].");
    AddOutput("Out",
              "(Tensor) The output of clip_by_norm op with shape as input(X)");
    AddAttr<float>("max_norm", "(float) The maximum norm value.");
    AddComment(R"DOC(
ClipByNorm Operator.

This operator limits the L2 norm of the input $X$ within $max\_norm$.
If the L2 norm of $X$ is less than or equal to $max\_norm$, $Out$ will be
the same as $X$. If the L2 norm of $X$ is greater than $max\_norm$, $X$ will
be linearly scaled to make the L2 norm of $Out$ equal to $max\_norm$, as
shown in the following formula:

$$
Out = \frac{max\_norm * X}{norm(X)},
$$

where $norm(X)$ represents the L2 norm of $X$.
)DOC");
  }
};

}  // namespace operators
}  // namespace paddle

namespace ops = paddle::operators;
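// Register clip_by_norm without a gradient operator and bind its float CPU
// kernel; the kernel template comes from the included clip_by_norm_op.h.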
REGISTER_OP_WITHOUT_GRADIENT(clip_by_norm, ops::ClipByNormOp,
                             ops::ClipByNormOpMaker);
REGISTER_OP_CPU_KERNEL(
    clip_by_norm,
    ops::ClipByNormKernel<paddle::platform::CPUDeviceContext, float>);