/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/framework/infershape_utils.h"
#include "paddle/fluid/framework/op_registry.h"
#include "paddle/fluid/framework/op_version_registry.h"
#include "paddle/phi/core/infermeta_utils.h"
#include "paddle/phi/infermeta/unary.h"

namespace paddle {
namespace operators {

// Operator definition shared by the cumsum and logcumsumexp registrations
// below. It declares no InferShape of its own; shape inference is supplied
// by the DECLARE_INFER_SHAPE_FUNCTOR / PD_INFER_META(phi::CumInferMeta)
// functors passed to REGISTER_OPERATOR at the bottom of this file.
class CumOp : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;
};

class CumsumOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  // Defines the op proto for cumsum: one input tensor, one output tensor,
  // and the axis/flatten/exclusive/reverse attributes consumed by the
  // kernel and mirrored into the grad op by CumsumGradMaker.
  void Make() override {
    AddInput("X", "Input of cumsum operator");
    AddOutput("Out", "Output of cumsum operator");
    // -1 selects the trailing dimension.
    AddAttr<int>("axis",
                 "The dimension to accumulate along. -1 means the last "
                 "dimension [default -1].")
        .SetDefault(-1);
    AddAttr<bool>("flatten",
                  "Whether to compute the cumsum over the flattened array. "
                  "[default false].")
        .SetDefault(false);
    AddAttr<bool>("exclusive",
                  "Whether to perform exclusive cumsum. [default false].")
        .SetDefault(false);
    AddAttr<bool>("reverse",
                  "If true, the cumsum is performed in the reversed direction. "
                  "[default false].")
        .SetDefault(false);
    AddComment(R"DOC(
The cumulative sum of the elements along a given axis.
By default, the first element of the result is the same of the first element of
the input. If exclusive is true, the first element of the result is 0.
)DOC");
  }
};

// Grad maker for cumsum. The backward pass of a cumulative sum is itself a
// cumulative sum of the output gradient taken in the opposite direction, so
// the grad op reuses the "cumsum" op type with `reverse` negated.
template <typename T>
class CumsumGradMaker : public framework::SingleGradOpMaker<T> {
 public:
  using framework::SingleGradOpMaker<T>::SingleGradOpMaker;

 protected:
  void Apply(GradOpPtr<T> grad_op) const override {
    grad_op->SetType("cumsum");
    // dX = cumsum(dOut) along the same axis, with `reverse` flipped;
    // `axis`, `flatten` and `exclusive` are carried over unchanged.
    grad_op->SetInput("X", this->OutputGrad("Out"));
    grad_op->SetOutput("Out", this->InputGrad("X"));
    grad_op->SetAttr("axis", BOOST_GET_CONST(int, this->GetAttr("axis")));
    grad_op->SetAttr("flatten",
                     BOOST_GET_CONST(bool, this->GetAttr("flatten")));
    grad_op->SetAttr("reverse",
                     !BOOST_GET_CONST(bool, this->GetAttr("reverse")));
    grad_op->SetAttr("exclusive",
                     BOOST_GET_CONST(bool, this->GetAttr("exclusive")));
  }
};

class LogcumsumexpOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  // Op proto for logcumsumexp; the attribute set deliberately mirrors the
  // one declared by CumsumOpMaker so both ops share phi::CumInferMeta.
  void Make() override {
    AddInput("X", "Input of logcumsumexp operator");
    AddOutput("Out", "Output of logcumsumexp operator");
    // -1 selects the trailing dimension.
    AddAttr<int>("axis",
                 "The dimension to accumulate along. -1 means the last "
                 "dimension [default -1].")
        .SetDefault(-1);
    AddAttr<bool>("flatten",
                  "Whether to compute the logcumsumexp over the "
                  "flattened array. [default false].")
        .SetDefault(false);
    AddAttr<bool>("exclusive",
                  "Whether to perform exclusive logcumsumexp. "
                  "[default false].")
        .SetDefault(false);
    AddAttr<bool>("reverse",
                  "If true, the logcumsumexp is performed in the "
                  "reversed direction. [default false].")
        .SetDefault(false);
    AddComment(R"DOC(
Returns the logarithm of the cumulative summation of the exponentiation of elements of input along the given axis.
By default, the first element of the result is the same of the first element of
the input. If exclusive is true, the first element of the result is the lowest finite value of the dtype of output tensor.
)DOC");
  }
};

class LogcumsumexpGradOp : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

  // Validates that the forward input, forward output, and the incoming
  // output gradient are all wired up, then gives X@GRAD the shape of X.
  void InferShape(framework::InferShapeContext* ctx) const override {
    const auto out_grad_name = framework::GradVarName("Out");
    OP_INOUT_CHECK(ctx->HasInput("X"), "Input", "X", "logcumsumexp");
    OP_INOUT_CHECK(ctx->HasInput("Out"), "Input", "Out", "logcumsumexp");
    OP_INOUT_CHECK(ctx->HasInput(out_grad_name), "Input", "Out@GRAD",
                   "logcumsumexp");
    ctx->SetOutputDim(framework::GradVarName("X"), ctx->GetInputDim("X"));
  }
};

// Grad maker for logcumsumexp: builds a "logcumsumexp_grad" op that
// receives the forward input, the forward output, and the output gradient,
// and forwards every forward-op attribute unchanged.
template <typename T>
class LogcumsumexpGradMaker : public framework::SingleGradOpMaker<T> {
 public:
  using framework::SingleGradOpMaker<T>::SingleGradOpMaker;

 protected:
  void Apply(GradOpPtr<T> op) const override {
    op->SetType("logcumsumexp_grad");
    // Inputs: X, Out and Out@GRAD; output: X@GRAD.
    op->SetInput("X", this->Input("X"));
    op->SetInput("Out", this->Output("Out"));
    op->SetInput(framework::GradVarName("Out"), this->OutputGrad("Out"));
    op->SetOutput(framework::GradVarName("X"), this->InputGrad("X"));
    // Copy all four attributes verbatim from the forward op.
    op->SetAttr("axis", BOOST_GET_CONST(int, this->GetAttr("axis")));
    op->SetAttr("flatten", BOOST_GET_CONST(bool, this->GetAttr("flatten")));
    op->SetAttr("exclusive",
                BOOST_GET_CONST(bool, this->GetAttr("exclusive")));
    op->SetAttr("reverse", BOOST_GET_CONST(bool, this->GetAttr("reverse")));
  }
};

}  // namespace operators
}  // namespace paddle

namespace ops = paddle::operators;
// NOTE(review): the CPU alias is not referenced anywhere in this file
// (kernels appear to live in phi) — confirm before removing.
using CPU = paddle::platform::CPUDeviceContext;

// Shape inference for both forward ops is delegated to phi's CumInferMeta.
DECLARE_INFER_SHAPE_FUNCTOR(cumsum, CumsumInferShapeFunctor,
                            PD_INFER_META(phi::CumInferMeta));
DECLARE_INFER_SHAPE_FUNCTOR(logcumsumexp, LogcumsumexpInferShapeFunctor,
                            PD_INFER_META(phi::CumInferMeta));
REGISTER_OPERATOR(cumsum, ops::CumOp, ops::CumsumOpMaker,
                  ops::CumsumGradMaker<paddle::framework::OpDesc>,
                  ops::CumsumGradMaker<paddle::imperative::OpBase>,
                  CumsumInferShapeFunctor);
REGISTER_OPERATOR(logcumsumexp, ops::CumOp, ops::LogcumsumexpOpMaker,
                  ops::LogcumsumexpGradMaker<paddle::framework::OpDesc>,
                  ops::LogcumsumexpGradMaker<paddle::imperative::OpBase>,
                  LogcumsumexpInferShapeFunctor);
REGISTER_OPERATOR(logcumsumexp_grad, ops::LogcumsumexpGradOp);

// Version checkpoint: `flatten` was added after the original cumsum
// release; its default (false) preserves the old behavior.
REGISTER_OP_VERSION(cumsum).AddCheckpoint(
    R"ROC(
      Upgrade cumsum add a new attribute [flatten].
    )ROC",
    paddle::framework::compatible::OpVersionDesc().NewAttr(
        "flatten",
        "In order to compute the cumsum over the flattened array when the "
        "argument `axis` in python API is None.",
        false));