// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "paddle/fluid/operators/reduce_ops/reduce_sum_op.h"

#include <string>

#include "paddle/fluid/framework/infershape_utils.h"
#include "paddle/fluid/prim/api/composite_backward/composite_backward_api.h"
#include "paddle/phi/core/infermeta_utils.h"
#include "paddle/phi/infermeta/unary.h"

// Forward declarations only: this translation unit merely names OpDesc and
// OpBase as template arguments below, so full definitions are not required.
namespace paddle {
namespace framework {
class OpDesc;
}  // namespace framework
namespace imperative {
class OpBase;
}  // namespace imperative
}  // namespace paddle

33 34 35 36 37
namespace paddle {
namespace operators {

// NOTE: Input(Out) is unnecessary in reduce_sum_grad, and Input(X) needs no
// buffer
H
hong 已提交
38 39 40

template <typename T>
class ReduceSumOpGradMaker : public framework::SingleGradOpMaker<T> {
41
 public:
H
hong 已提交
42
  using framework::SingleGradOpMaker<T>::SingleGradOpMaker;
43 44

 protected:
45
  void Apply(GradOpPtr<T> op) const override {
46
    op->SetType("reduce_sum_grad");
H
hong 已提交
47 48 49 50
    op->SetInput("X", this->Input("X"));
    op->SetInput(framework::GradVarName("Out"), this->OutputGrad("Out"));
    op->SetAttrMap(this->Attrs());
    op->SetOutput(framework::GradVarName("X"), this->InputGrad("X"));
51
  }
52

53
  phi::KernelKey GetExpectedKernelType(
54
      const framework::ExecutionContext& ctx) const {
55
    int in_dtype = ctx.Attr<int>("out_dtype");
56
    if (in_dtype >= 0) {
57
      return phi::KernelKey(
58 59 60
          static_cast<framework::proto::VarType::Type>(in_dtype),
          ctx.GetPlace());
    }
61 62 63
    return phi::KernelKey(framework::OperatorWithKernel::IndicateVarDataType(
                              ctx, framework::GradVarName("Out")),
                          ctx.GetPlace());
64
  }
65 66
};

67
class ReduceSumCompositeGradOpMaker : public prim::CompositeGradOpMakerBase {
68
 public:
69
  using prim::CompositeGradOpMakerBase::CompositeGradOpMakerBase;
70 71
  void Apply() override {
    // get inputs
72 73
    paddle::Tensor x = this->GetSingleForwardInput("X");
    paddle::Tensor out_grad = this->GetSingleOutputGrad("Out");
74 75 76 77 78 79

    // get attr
    std::vector<int> axis = this->Attr<std::vector<int>>("dim");
    bool keep_dim = this->Attr<bool>("keep_dim");
    bool reduce_all = this->Attr<bool>("reduce_all");
    // get output
80
    paddle::Tensor x_grad_t = this->GetSingleInputGrad("X");
81 82

    // get output ptr
83
    paddle::Tensor* x_grad = this->GetOutputPtr(&x_grad_t);
84 85 86

    // get output orginal name
    std::string x_grad_name = this->GetOutputName(x_grad_t);
J
Jiabin Yang 已提交
87
    VLOG(6) << "Runing sum_grad composite func";
88 89 90 91 92 93 94 95
    // call composite backward func
    prim::sum_grad<prim::DescTensor>(
        x, out_grad, axis, keep_dim, reduce_all, x_grad);
    // recover output name
    this->RecoverOutputName(x_grad_t, x_grad_name);
  }
};

96 97 98 99 100 101 102 103 104 105 106 107 108 109
template <typename T>
class ReduceSumDoubleOpGradMaker : public framework::SingleGradOpMaker<T> {
 public:
  using framework::SingleGradOpMaker<T>::SingleGradOpMaker;

 protected:
  void Apply(GradOpPtr<T> op) const override {
    op->SetInput("X", this->OutputGrad(framework::GradVarName("X")));
    op->SetOutput("Out", this->InputGrad(framework::GradVarName("Out")));
    op->SetAttrMap(this->Attrs());
    op->SetType("reduce_sum");
  }
};

110
DECLARE_NO_NEED_BUFFER_VARS_INFERER(ReduceSumGradNoNeedBufferVarInferer, "X");
111 112 113 114
class ReduceSumVarTypeInference : public paddle::framework::VarTypeInference {
 public:
  void operator()(paddle::framework::InferVarTypeContext* ctx) const override {
    auto data_type = static_cast<paddle::framework::proto::VarType::Type>(
R
Ruibiao Chen 已提交
115
        PADDLE_GET_CONST(int, ctx->GetAttr("out_dtype")));
116
    if (data_type >= 0) {
117
      ctx->SetOutputDataType("Out", data_type);
118 119 120 121 122 123
    } else {
      auto x_type = ctx->GetInputDataType("X");
      if (x_type == framework::proto::VarType::BOOL ||
          x_type == framework::proto::VarType::INT32) {
        ctx->SetOutputDataType("Out", framework::proto::VarType::INT64);
      }
124 125 126
    }
  }
};
127 128 129 130

}  // namespace operators
}  // namespace paddle

131
class ReduceSumOpMaker : public ops::ReduceBaseOpMaker {
132 133 134 135 136
 protected:
  virtual std::string GetName() const { return "reduce_sum"; }
  virtual std::string GetOpType() const { return "Reduce reduce_sum"; }
};

137 138
DECLARE_INFER_SHAPE_FUNCTOR(reduce_sum,
                            ReduceSumInferShapeFunctor,
139
                            PD_INFER_META(phi::SumRawInferMeta));
140

141
REGISTER_OPERATOR(reduce_sum,
142
                  ops::ReduceBaseOp,
143
                  ReduceSumOpMaker,
144
                  ops::ReduceSumVarTypeInference,
H
hong 已提交
145
                  ops::ReduceSumOpGradMaker<paddle::framework::OpDesc>,
146
                  ops::ReduceSumOpGradMaker<paddle::imperative::OpBase>,
147
                  ops::ReduceSumCompositeGradOpMaker,
148
                  ReduceSumInferShapeFunctor);
149 150
REGISTER_OPERATOR(reduce_sum_grad,
                  ops::ReduceGradOp,
151 152
                  ops::ReduceSumDoubleOpGradMaker<paddle::framework::OpDesc>,
                  ops::ReduceSumDoubleOpGradMaker<paddle::imperative::OpBase>,
153
                  ops::ReduceSumGradNoNeedBufferVarInferer);