/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License. */

#include "paddle/framework/grad_op_builder.h"
16
#include "paddle/framework/op_proto.pb.h"
F
fengjiayi 已提交
17
#include "paddle/framework/op_registry.h"
18 19 20 21

namespace paddle {
namespace framework {

typedef std::vector<int> Ints;

enum class OpArgType { IN, OUT };

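// Fetch the offset-table attribute named `key` (e.g. "input_format");
// returns nullptr if the op carries no such attribute. The const and
// non-const overloads serve read-only and in-place-update callers.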
const Ints* AttrFormat(const AttributeMap& attrs, const std::string& key) {
  return (attrs.count(key) > 0) ? &boost::get<Ints>(attrs.at(key)) : nullptr;
}

Ints* AttrFormat(AttributeMap& attrs, const std::string& key) {
  return (attrs.count(key) > 0) ? &boost::get<Ints>(attrs.at(key)) : nullptr;
}

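// Copy one argument list of `src_op` (inputs or outputs, chosen by
// `src_type`) into the argument list of the gradient op chosen by
// `dst_type`. When `is_grad` is true, each variable name gets
// kGradVarSuffix appended. The gradient op's name-to-index map and
// offset table are updated along the way.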
static void TransOpArg(const OperatorBase* src_op,
                       std::vector<std::string>& grad_inputs,
                       std::vector<std::string>& grad_outputs,
                       AttributeMap& grad_attrs,
                       std::unordered_map<std::string, int>& grad_idxs,
                       const std::string& src_type, const std::string& dst_type,
                       int& idx, bool is_grad) {
  const std::vector<std::string>& src_inout =
      (src_type == "input_format") ? src_op->inputs_ : src_op->outputs_;

  const std::vector<int>* src_format = AttrFormat(src_op->Attrs(), src_type);

  std::vector<std::string>& dst_inout =
      (dst_type == "input_format") ? grad_inputs : grad_outputs;

  std::vector<int>* dst_format = AttrFormat(grad_attrs, dst_type);

  const OpProto& proto = *(OpRegistry::op_info_map().at(src_op->type_).proto_);

  const auto& src_arg_list =
      (src_type == "input_format") ? proto.inputs() : proto.outputs();

  for (const auto& arg : src_arg_list) {
    std::string src_name = arg.name();
    std::string dst_name = is_grad ? src_name + kGradVarSuffix : src_name;
    grad_idxs[dst_name] = idx++;
    int src_arg_idx = src_op->in_out_idxs_->at(src_name);
    int src_begin =
        src_format == nullptr ? src_arg_idx : src_format->at(src_arg_idx);
    int src_end = src_format == nullptr ? src_arg_idx + 1
                                        : src_format->at(src_arg_idx + 1);
    for (int i = src_begin; i < src_end; ++i) {
      std::string s =
          is_grad ? src_inout[i] + kGradVarSuffix
                  : (arg.ignore_gradient() ? kEmptyVarName : src_inout[i]);
      dst_inout.emplace_back(s);
    }
    if (dst_format != nullptr) {
      dst_format->push_back(dst_inout.size());
    }
  }
}

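// Build the gradient operator for `op` from the registry. The returned
// operator is heap-allocated and owned by the caller.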
OperatorBase* BuildGradOp(const OperatorBase* op) {
  auto it = OpRegistry::op_info_map().find(op->type_);
  PADDLE_ENFORCE(it != OpRegistry::op_info_map().end(),
                 "'%s' has not been registered.", op->type_);
  std::string grad_op_type = it->second.grad_op_type_;
  PADDLE_ENFORCE(!grad_op_type.empty(), "'%s' has no gradient operator.",
                 op->type_);

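  // Copy the forward op's attributes, then rebuild the offset tables:
  // re-seed them with a single 0 and let the TransOpArg calls below
  // append the real offsets.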
  AttributeMap grad_attrs(op->Attrs());
  grad_attrs.erase("input_format");
  grad_attrs.erase("output_format");
  if (op->Attrs().count("input_format") > 0) {
    grad_attrs["output_format"] = std::vector<int>({0});
  }
  if (op->Attrs().count("input_format") > 0 ||
      op->Attrs().count("output_format") > 0) {
    grad_attrs["input_format"] = std::vector<int>({0});
  }

  std::vector<std::string> grad_inputs, grad_outputs;

  using VarIndexMap = std::unordered_map<std::string, int>;
  VarIndexMap* grad_idxs = new VarIndexMap;
  int in_idx = 0;
  int out_idx = 0;
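  // Wire up the four argument groups of the gradient op:
  //   I  - forward inputs            -> gradient-op inputs
  //   G  - forward outputs           -> gradient-op inputs
  //   OG - forward output gradients  -> gradient-op inputs
  //   IG - forward input gradients   -> gradient-op outputs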
  TransOpArg(op, grad_inputs, grad_outputs, grad_attrs, *grad_idxs,
             "input_format", "input_format", in_idx, false);  // I
  TransOpArg(op, grad_inputs, grad_outputs, grad_attrs, *grad_idxs,
             "output_format", "input_format", in_idx, false);  // G
  TransOpArg(op, grad_inputs, grad_outputs, grad_attrs, *grad_idxs,
             "output_format", "input_format", in_idx, true);  // OG
  TransOpArg(op, grad_inputs, grad_outputs, grad_attrs, *grad_idxs,
             "input_format", "output_format", out_idx, true);  // IG

  it = OpRegistry::op_info_map().find(grad_op_type);
  PADDLE_ENFORCE(it != OpRegistry::op_info_map().end(),
                 "'%s' has not been registered.", grad_op_type);
  OperatorBase* grad_op = it->second.creator_();

  grad_op->type_ = grad_op_type;
  grad_op->inputs_ = grad_inputs;
  grad_op->outputs_ = grad_outputs;
  grad_op->attrs_ = grad_attrs;
  grad_op->in_out_idxs_.reset(grad_idxs);

  return grad_op;
}

}  // namespace framework
}  // namespace paddle