/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License. */

#include "paddle/framework/grad_op_builder.h"
#include "paddle/framework/op_registry.h"

namespace paddle {
namespace framework {
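
// Selects which side of an operator's signature an argument list comes
// from: its inputs (IN) or its outputs (OUT).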
enum class OpArgType { IN, OUT };

using VarNameMap = OperatorBase::VarNameMap;

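// Transcribes one argument list of src_op (its inputs when src_type is IN,
// its outputs when src_type is OUT) into a fresh VarNameMap. When is_grad
// is true, every argument and variable name is rewritten to its gradient
// counterpart via GradVarName(); otherwise, arguments marked no_gradient()
// in the op's proto are skipped. Note that dst_type is currently unused.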
static VarNameMap TransOpArg(const OperatorBase* src_op,
                             const OpArgType& src_type,
                             const OpArgType& dst_type, bool is_grad) {
  const auto& src_inout =
      src_type == OpArgType::IN ? src_op->Inputs() : src_op->Outputs();
  VarNameMap dst_inout;

  const OpProto& proto = OpProtos().at(src_op->Type());
  const auto& src_arg_list =
      src_type == OpArgType::IN ? proto.inputs() : proto.outputs();
  for (const auto& arg : src_arg_list) {
    if (arg.no_gradient() && !is_grad) continue;
    const std::string src_name = arg.name();
    std::string dst_name = is_grad ? GradVarName(src_name) : src_name;
    dst_inout[dst_name].reserve(src_inout.at(src_name).size());
    for (auto& var_name : src_inout.at(src_name)) {
      std::string s = is_grad ? GradVarName(var_name) : var_name;
      dst_inout[dst_name].emplace_back(s);
    }
  }
  return dst_inout;
}

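// Builds the gradient operator for `op`. The grad op takes the forward
// op's inputs (I), outputs (O), and output gradients (OG) as its inputs,
// and produces the forward op's input gradients (IG) as its outputs.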
OperatorBase* BuildGradOp(const OperatorBase* op) {
  std::string grad_op_type = OpRegistry::grad_ops().at(op->Type());
  auto I = TransOpArg(op, OpArgType::IN, OpArgType::IN, false);   // I
  auto O = TransOpArg(op, OpArgType::OUT, OpArgType::IN, false);  // O
  auto OG = TransOpArg(op, OpArgType::OUT, OpArgType::IN, true);  // OG
  auto IG = TransOpArg(op, OpArgType::IN, OpArgType::OUT, true);  // IG
  // TODO(merge I/O/OG)
  VarNameMap GradIn;
  GradIn.insert(I.begin(), I.end());
  GradIn.insert(O.begin(), O.end());
  GradIn.insert(OG.begin(), OG.end());

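  // Instantiate the grad op through its registered creator, feeding the
  // merged forward names as inputs, the input gradients as outputs, and
  // reusing the forward op's attributes.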
  OperatorBase* grad_op = OpRegistry::op_creators().at(grad_op_type)(
      grad_op_type, GradIn, IG, op->Attrs());
  return grad_op;
}

}  // namespace framework
}  // namespace paddle