// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "paddle/fluid/operators/reduce_ops/reduce_mean_op.h"
#include <memory>
#include <string>
#include <utility>
#include <vector>

namespace paddle {
namespace operators {

// NOTE(dengkaipeng): Input(Out) is unnecessary for the reduce_mean_grad
// calculation, but keeping it would incur an extra reduce_mean_grad op after
// reduce_mean_grad_grad, so Input(Out) is dropped here. This change has no
// effect on the reduce_mean_grad result.
template <typename T>
class ReduceMeanOpGradMaker : public framework::SingleGradOpMaker<T> {
 public:
  using framework::SingleGradOpMaker<T>::SingleGradOpMaker;

 protected:
  void Apply(GradOpPtr<T> op) const override {
    op->SetType("reduce_mean_grad");
    op->SetInput("X", this->Input("X"));
    op->SetInput(framework::GradVarName("Out"), this->OutputGrad("Out"));
    op->SetAttrMap(this->Attrs());
    op->SetOutput(framework::GradVarName("X"), this->InputGrad("X"));
  }
};

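// reduce_mean_grad is linear in dOut (dX = broadcast(dOut) / n), so its
// transpose is again a mean reduction: ddOut = reduce_mean(ddX). The double
// grad can therefore be expressed as a plain reduce_mean op over ddX; the
// maker below builds that op for the static-graph (OpDesc) path.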
class ReduceMeanDoubleGradDescMaker : public framework::GradOpDescMakerBase {
 public:
  using framework::GradOpDescMakerBase::GradOpDescMakerBase;

  std::vector<std::unique_ptr<framework::OpDesc>> operator()() const override {
    std::vector<std::unique_ptr<framework::OpDesc>> ops;
    auto x_gg = OutputGrad(framework::GradVarName("X"));  // input ddx
    auto out_grads = InputGrad(framework::GradVarName("Out"));
    if (!out_grads.empty()) {
      auto* out_grad_op = new framework::OpDesc();
      out_grad_op->SetType("reduce_mean");
      out_grad_op->SetInput("X", x_gg);
      out_grad_op->SetAttrMap(Attrs());
      out_grad_op->SetOutput("Out", out_grads);
      ops.emplace_back(out_grad_op);
    }

    return ops;
  }
};
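
// Dygraph counterpart of the maker above: it traces the same
// reduce_mean(ddX) -> ddOut op as an imperative GradOpNode, or returns
// nullptr when no gradient of dOut is required.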
class ReduceMeanDoubleGradOpBaseMaker : public imperative::GradOpBaseMakerBase {
 public:
  using imperative::GradOpBaseMakerBase::GradOpBaseMakerBase;

  std::shared_ptr<imperative::GradOpNode> operator()() const override {
    auto out_grads = InputGrad(framework::GradVarName("Out"));
    if (!out_grads.empty()) {
      auto x_gg = OutputGrad(framework::GradVarName("X"));  // input ddx
      auto node = this->NewGradNode();
      {
        imperative::TracedGradOp op(node);
        op.SetType("reduce_mean");
        op.SetInput("X", x_gg);
        op.SetAttrMap(Attrs());
        op.SetOutput("Out", out_grads);
      }
      return node;
    } else {
      return nullptr;
    }
  }
};
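
// reduce_mean_grad only uses the shape of Input(X) to expand dOut, not its
// data, so X is declared as a no-need-buffer variable for the grad op.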
DECLARE_NO_NEED_BUFFER_VARS_INFERENCE(ReduceMeanGradNoNeedBufferVarInference,
                                      "X");
}  // namespace operators
}  // namespace paddle

namespace ops = paddle::operators;

class __reduce_meanMaker__ : public ops::ReduceOpMaker {
 protected:
  virtual std::string GetName() const { return "reduce_mean"; }
  virtual std::string GetOpType() const { return "Reduce reduce_mean"; }
};

REGISTER_OPERATOR(reduce_mean, ops::ReduceOp, __reduce_meanMaker__,
                  ops::ReduceMeanOpGradMaker<paddle::framework::OpDesc>,
                  ops::ReduceMeanOpGradMaker<paddle::imperative::OpBase>);
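// The grad op is registered with the double-grad makers for both modes and
// the no-need-buffer declaration for X.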
REGISTER_OPERATOR(reduce_mean_grad, ops::ReduceGradOp,
                  ops::ReduceMeanDoubleGradDescMaker,
                  ops::ReduceMeanDoubleGradOpBaseMaker,
                  ops::ReduceMeanGradNoNeedBufferVarInference);
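// CPU forward kernels: float, double, int and int64_t all reuse ReduceKernel
// with MeanFunctor.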
REGISTER_OP_CPU_KERNEL(reduce_mean,
                       ops::ReduceKernel<paddle::platform::CPUDeviceContext,
                                         float, ops::MeanFunctor>,
                       ops::ReduceKernel<paddle::platform::CPUDeviceContext,
                                         double, ops::MeanFunctor>,
                       ops::ReduceKernel<paddle::platform::CPUDeviceContext,
                                         int, ops::MeanFunctor>,
                       ops::ReduceKernel<paddle::platform::CPUDeviceContext,
                                         int64_t, ops::MeanFunctor>);
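
// NOTE: the trailing `true` template argument is assumed to tell
// ReduceGradKernel that Input(X)'s data is not needed when computing the
// gradient, matching the no-need-buffer declaration above.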
template <typename T>
using CPUReduceMeanGradKernel =
    ops::ReduceGradKernel<paddle::platform::CPUDeviceContext, T,
                          ops::MeanGradFunctor, true>;

REGISTER_OP_CPU_KERNEL(reduce_mean_grad, CPUReduceMeanGradKernel<float>,
                       CPUReduceMeanGradKernel<double>,
                       CPUReduceMeanGradKernel<int>,
                       CPUReduceMeanGradKernel<int64_t>);