grad_op_builder.cc 2.7 KB
Newer Older
F
fengjiayi 已提交
1 2 3 4 5 6 7 8 9 10
/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
F
fengjiayi 已提交
11 12 13
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License. */
F
fengjiayi 已提交
14

15
#include "paddle/framework/grad_op_builder.h"
F
fengjiayi 已提交
16
#include "paddle/framework/op_registry.h"
17 18 19

namespace paddle {
namespace framework {
F
fengjiayi 已提交
20
// Selects which side of an operator is read by TransOpArg: its declared
// inputs (IN) or its declared outputs (OUT).
enum class OpArgType { IN, OUT };
21

22 23
static void TransOpArg(const OperatorBase* src_op, const OpArgType& src_type,
                       bool is_grad, OperatorBase::VarNameMap* vars) {
24
  const auto& src_inout =
Q
qiaolongfei 已提交
25
      src_type == OpArgType::IN ? src_op->Inputs() : src_op->Outputs();
Y
Yu Yang 已提交
26
  auto& dst_inout = *vars;
27
  const OpProto* proto = OpRegistry::op_info_map().at(src_op->Type()).proto_;
F
fengjiayi 已提交
28
  const auto& src_arg_list =
F
fengjiayi 已提交
29
      src_type == OpArgType::IN ? proto->inputs() : proto->outputs();
30
  for (const auto& arg : src_arg_list) {
Q
qingqing01 已提交
31
    if (arg.no_gradient() && !is_grad) continue;
Q
qingqing01 已提交
32
    const std::string src_name = arg.name();
33
    std::string dst_name = is_grad ? GradVarName(src_name) : src_name;
Q
qingqing01 已提交
34
    dst_inout[dst_name].reserve(src_inout.at(src_name).size());
35
    for (auto& var_name : src_inout.at(src_name)) {
Q
qingqing01 已提交
36
      std::string s = is_grad ? GradVarName(var_name) : var_name;
37
      dst_inout[dst_name].emplace_back(s);
38 39 40 41
    }
  }
}

42
// Builds the gradient operator for `op`.
//
// The gradient op's inputs are the forward op's inputs (I), outputs (O),
// and output gradients (OG); its outputs are the forward op's input
// gradients (IG). Attributes are forwarded unchanged.
//
// Returns a heap-allocated operator; the caller takes ownership.
// Fails via PADDLE_ENFORCE when `op` is unregistered, has no OpProto,
// declares no gradient op type, or the gradient op type is unregistered.
OperatorBase* BuildGradOp(const OperatorBase* op) {
  // Single lookup of the registry; the map itself is long-lived.
  auto& info_map = OpRegistry::op_info_map();
  auto it = info_map.find(op->Type());
  PADDLE_ENFORCE(it != info_map.end(), "'%s' has not been registered.",
                 op->Type());
  PADDLE_ENFORCE(it->second.proto_ != nullptr, "'%s' has no OpProto.",
                 op->Type());
  // Reference, not copy: map nodes are stable, so this remains valid even
  // after `it` is re-assigned below.
  const std::string& grad_op_type = it->second.grad_op_type_;
  PADDLE_ENFORCE(!grad_op_type.empty(), "'%s' has no gradient operator.",
                 op->Type());

  OperatorBase::VarNameMap inputs;
  OperatorBase::VarNameMap outputs;
  TransOpArg(op, OpArgType::IN, false, &inputs);   // I
  TransOpArg(op, OpArgType::OUT, false, &inputs);  // O
  TransOpArg(op, OpArgType::OUT, true, &inputs);   // OG
  TransOpArg(op, OpArgType::IN, true, &outputs);   // IG

  it = info_map.find(grad_op_type);
  PADDLE_ENFORCE(it != info_map.end(), "'%s' has not been registered.",
                 grad_op_type);
  return it->second.creator_(grad_op_type, inputs, outputs, op->Attrs());
}
64

65
}  // namespace framework
F
fengjiayi 已提交
66
}  // namespace paddle