grad_op_builder.cc
/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License. */

#include "paddle/framework/grad_op_builder.h"
#include "paddle/framework/op_proto.pb.h"
#include "paddle/framework/op_registry.h"

namespace paddle {
namespace framework {

class OpRegistry;

using VarIndexMap = std::unordered_map<std::string, int>;

enum class OpArgType { IN, OUT };
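
// Returns the "input_format"/"output_format" attribute of `op`: an offset
// table that slices the op's flat variable vector into per-argument ranges.
// Returns nullptr if the op does not carry the attribute.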
static std::vector<int>* GetOpFormat(OperatorBase* op, const OpArgType& type) {
  std::string key = type == OpArgType::IN ? "input_format" : "output_format";
  return op->attrs_.count(key)
             ? &boost::get<std::vector<int>>(op->attrs_.at(key))
             : nullptr;
}

static const std::vector<int>* GetOpFormat(const OperatorBase* op,
                                           const OpArgType& type) {
  std::string key = type == OpArgType::IN ? "input_format" : "output_format";
  return op->attrs_.count(key)
             ? &boost::get<std::vector<int>>(op->attrs_.at(key))
             : nullptr;
}
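
// Copies one argument list of `src_op` (inputs or outputs, selected by
// `src_type`) into the `dst_type` list of `dst_op`, assigning each argument
// the next slot in `dst_op`'s name-to-index map via `idx`. When `is_grad`
// is true, kGradVarSuffix is appended to every copied variable name so that
// it refers to the corresponding gradient variable.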
static void TransOpArg(const OperatorBase* src_op, OperatorBase* dst_op,
                       const OpArgType& src_type, const OpArgType& dst_type,
                       int& idx, bool is_grad) {
  const std::vector<std::string>& src_inout =
      src_type == OpArgType::IN ? src_op->inputs_ : src_op->outputs_;
  const std::vector<int>* src_format = GetOpFormat(src_op, src_type);

  std::vector<std::string>& dst_inout =
      dst_type == OpArgType::IN ? dst_op->inputs_ : dst_op->outputs_;
  std::vector<int>* dst_format = GetOpFormat(dst_op, dst_type);
  const OpProto& proto = OpRegistry::protos().at(src_op->type_);
  const auto& src_arg_list =
      src_type == OpArgType::IN ? proto.inputs() : proto.outputs();

  for (const auto& arg : src_arg_list) {
    std::string src_name = arg.name();
    std::string dst_name = is_grad ? src_name + kGradVarSuffix : src_name;
    (*dst_op->in_out_idxs_)[dst_name] = idx++;
    int src_arg_idx = src_op->in_out_idxs_->at(src_name);
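    // With a format table, argument `src_arg_idx` owns the half-open range
    // [src_format[src_arg_idx], src_format[src_arg_idx + 1]) of the flat
    // variable vector; without one, each argument holds a single variable.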
    int src_begin =
        src_format == nullptr ? src_arg_idx : src_format->at(src_arg_idx);
    int src_end = src_format == nullptr ? src_arg_idx + 1
                                        : src_format->at(src_arg_idx + 1);
    for (int i = src_begin; i < src_end; ++i) {
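      // Gradient arguments take the source name plus kGradVarSuffix; plain
      // copies blank out variables whose gradient is ignored by substituting
      // the kEmptyVarName placeholder.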
      std::string s =
          is_grad ? src_inout[i] + kGradVarSuffix
                  : (arg.ignore_gradient() ? kEmptyVarName : src_inout[i]);
      dst_inout.emplace_back(s);
    }
    if (dst_format != nullptr) {
      dst_format->push_back(dst_inout.size());
    }
  }
}
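
// Builds the gradient operator of `op`. The grad op consumes the forward
// op's inputs (I), outputs (O), and output gradients (OG), and produces
// the input gradients (IG).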
OperatorBase* BuildGradOp(const OperatorBase* op) {
  auto it = OpRegistry::op_info_map().find(op->type_);
  PADDLE_ENFORCE(it != OpRegistry::op_info_map().end(),
                 "'%s' has not been registered.", op->type_);
  std::string grad_op_type = it->second.grad_op_type_;
  PADDLE_ENFORCE(!grad_op_type.empty(), "'%s' has no gradient operator.",
                 op->type_);
  it = OpRegistry::op_info_map().find(grad_op_type);
  PADDLE_ENFORCE(it != OpRegistry::op_info_map().end(),
                 "'%s' has not been registered.", grad_op_type);
  OperatorBase* grad_op = it->second.creator_();
  grad_op->type_ = grad_op_type;
  grad_op->attrs_ = op->attrs_;
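  // The forward op's format tables describe its own argument layout, so drop
  // them and re-seed empty tables below; TransOpArg extends them as it copies
  // each argument group.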
  grad_op->attrs_.erase("input_format");
  grad_op->attrs_.erase("output_format");
  if (GetOpFormat(op, OpArgType::IN) != nullptr) {
    grad_op->attrs_["output_format"] = std::vector<int>({0});
  }
  if (GetOpFormat(op, OpArgType::IN) != nullptr ||
      GetOpFormat(op, OpArgType::OUT) != nullptr) {
    grad_op->attrs_["input_format"] = std::vector<int>({0});
  }
  grad_op->in_out_idxs_.reset(new VarIndexMap());
  int in_idx = 0;
  int out_idx = 0;
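  // Argument groups: I = forward inputs, O = forward outputs, OG = gradients
  // of the forward outputs, IG = gradients of the forward inputs.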
  TransOpArg(op, grad_op, OpArgType::IN, OpArgType::IN, in_idx, false);   // I
  TransOpArg(op, grad_op, OpArgType::OUT, OpArgType::IN, in_idx, false);  // G
  TransOpArg(op, grad_op, OpArgType::OUT, OpArgType::IN, in_idx, true);   // OG
  TransOpArg(op, grad_op, OpArgType::IN, OpArgType::OUT, out_idx, true);  // IG
  return grad_op;
}

}  // namespace framework
}  // namespace paddle
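
// A minimal usage sketch (illustrative, not part of this file; assumes
// `fwd_op` points to an operator whose type was registered together with a
// gradient op, e.g. via REGISTER_GRADIENT_OP):
//
//   paddle::framework::OperatorBase* grad_op =
//       paddle::framework::BuildGradOp(fwd_op);
//   // grad_op's inputs are now I + O + OG; its outputs are IG.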