/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License. */

#include "paddle/framework/grad_op_builder.h"
#include "paddle/framework/framework.pb.h"
#include "paddle/framework/op_registry.h"

namespace paddle {
namespace framework {
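// Selects which half of an operator's argument map TransOpArg reads from:
// the forward op's inputs or its outputs.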
enum class OpArgType { IN, OUT };

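// Copies one argument list (inputs or outputs, per src_type) of the forward
// op src_op into the gradient op's name map vars. When is_grad is true, every
// argument and variable name is rewritten to its gradient counterpart via
// GradVarName; otherwise the names are copied verbatim.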
static void TransOpArg(const OperatorBase* src_op,
                       OperatorBase::VarNameMap* vars,
                       const OpArgType& src_type, bool is_grad) {
  const auto& src_inout =
      src_type == OpArgType::IN ? src_op->inputs_ : src_op->outputs_;
  auto& dst_inout = *vars;

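  // The registered OpProto lists the op's declared arguments; walk them in
  // declaration order.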
  const OpProto& proto = OpProtos().at(src_op->type_);
  const auto& src_arg_list =
      src_type == OpArgType::IN ? proto.inputs() : proto.outputs();
  for (const auto& arg : src_arg_list) {
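    // Arguments marked no_gradient are skipped when copying forward names;
    // their gradient-named counterparts (is_grad == true) are still produced.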
    if (arg.no_gradient() && !is_grad) continue;
    const std::string src_name = arg.name();
    std::string dst_name = is_grad ? GradVarName(src_name) : src_name;
    dst_inout[dst_name].reserve(src_inout.at(src_name).size());
    for (auto& var_name : src_inout.at(src_name)) {
      std::string s = is_grad ? GradVarName(var_name) : var_name;
      dst_inout[dst_name].emplace_back(s);
    }
  }
}

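// Builds the gradient operator for op from registry metadata. The gradient op
// takes the forward op's inputs (I), outputs (O), and output gradients (OG)
// as inputs, and produces the forward op's input gradients (IG) as outputs.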
OperatorBase* BuildGradOp(const OperatorBase* op) {
  auto gop_type_it = OpRegistry::grad_ops().find(op->type_);
  PADDLE_ENFORCE(gop_type_it != OpRegistry::grad_ops().end(),
                 "Operator %s do not register gradient type", op->type_);
  auto& grad_op_type = gop_type_it->second;
  OperatorBase::VarNameMap inputs;
  OperatorBase::VarNameMap outputs;
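  // Assemble the gradient op's argument maps: I = forward inputs,
  // O = forward outputs, OG = gradients of forward outputs,
  // IG = gradients of forward inputs.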
  TransOpArg(op, &inputs, OpArgType::IN, false);   // I
  TransOpArg(op, &inputs, OpArgType::OUT, false);  // O
  TransOpArg(op, &inputs, OpArgType::OUT, true);   // OG
  TransOpArg(op, &outputs, OpArgType::IN, true);   // IG
  auto gop_it = OpRegistry::op_creators().find(grad_op_type);
  PADDLE_ENFORCE(gop_it != OpRegistry::op_creators().end(),
                 "Operator %s 's Gradient %s's creator cannot be found",
                 op->type_, grad_op_type);

  return gop_it->second(grad_op_type, inputs, outputs, op->attrs_);
}

}  // namespace framework
}  // namespace paddle