/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License. */
#include "paddle/framework/grad_op_builder.h"
#include "paddle/framework/framework.pb.h"
#include "paddle/framework/op_registry.h"

namespace paddle {
namespace framework {

// Forward declaration; the full definition lives in op_registry.h.
class OpRegistry;

// Maps a variable name to its positional index within an operator's
// argument list.
using VarIndexMap = std::unordered_map<std::string, int>;

// Selects whether an argument list refers to an op's inputs or outputs.
enum class OpArgType { IN, OUT };

static void TransOpArg(const OperatorBase* src_op, OperatorBase* dst_op,
                       const OpArgType& src_type, const OpArgType& dst_type,
30 31
                       bool is_grad) {
  const auto& src_inout =
F
fengjiayi 已提交
32
      src_type == OpArgType::IN ? src_op->inputs_ : src_op->outputs_;
33

34
  auto& dst_inout =
F
fengjiayi 已提交
35
      dst_type == OpArgType::IN ? dst_op->inputs_ : dst_op->outputs_;
36
  const OpProto& proto = OpRegistry::protos().at(src_op->type_);
F
fengjiayi 已提交
37 38
  const auto& src_arg_list =
      src_type == OpArgType::IN ? proto.inputs() : proto.outputs();
39 40 41

  for (const auto& arg : src_arg_list) {
    std::string src_name = arg.name();
42 43 44 45 46
    std::string dst_name = is_grad ? GradVarName(src_name) : src_name;
    for (auto& var_name : src_inout.at(src_name)) {
      std::string s = is_grad ? GradVarName(var_name)
                              : (arg.no_gradient() ? kEmptyVarName : var_name);
      dst_inout[dst_name].emplace_back(s);
47 48 49 50
    }
  }
}

// Builds the gradient operator for `op`.
//
// Looks up the registered gradient op type for op->type_, instantiates it
// through its registered creator, copies the forward op's attributes, and
// wires up the four standard gradient argument groups:
//   I  : forward inputs                -> grad op inputs
//   O  : forward outputs               -> grad op inputs
//   OG : gradients of forward outputs  -> grad op inputs
//   IG : gradients of forward inputs   -> grad op outputs
// The caller takes ownership of the returned raw pointer.
OperatorBase* BuildGradOp(const OperatorBase* op) {
  std::string grad_op_type = OpRegistry::grad_ops().at(op->type_);
  OperatorBase* grad_op = OpRegistry::op_creators().at(grad_op_type)();
  grad_op->type_ = grad_op_type;
  grad_op->attrs_ = op->attrs_;
  TransOpArg(op, grad_op, OpArgType::IN, OpArgType::IN, false);   // I
  TransOpArg(op, grad_op, OpArgType::OUT, OpArgType::IN, false);  // O
  TransOpArg(op, grad_op, OpArgType::OUT, OpArgType::IN, true);   // OG
  TransOpArg(op, grad_op, OpArgType::IN, OpArgType::OUT, true);   // IG
  return grad_op;
}
}  // namespace framework
}  // namespace paddle