// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "paddle/fluid/imperative/tracer.h"

#include <memory>
#include <set>
#include <unordered_map>
#include <unordered_set>
#include <utility>

#include "paddle/fluid/framework/var_type_inference.h"
#include "paddle/fluid/operators/math/math_function.h"
#include "paddle/fluid/platform/device_context.h"
#include "paddle/fluid/platform/enforce.h"

namespace paddle {
namespace imperative {

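// Build the grad op descriptions for a forward op via its registered
// GradOpMaker. `grad_to_var` is filled with the mapping from gradient
// variable names to their forward counterparts; ops without a grad maker
// are skipped.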
void CreateGradOp(const framework::OpDesc& op_desc,
                  const std::unordered_set<std::string>& no_grad_set,
                  const std::vector<framework::BlockDesc*>& grad_sub_block,
                  std::vector<framework::OpDesc*>* grad_op_descs,
                  std::unordered_map<std::string, std::string>* grad_to_var) {
  PADDLE_ENFORCE(grad_op_descs->empty());
  const framework::OpInfo& op_info =
      framework::OpInfoMap::Instance().Get(op_desc.Type());
  if (!op_info.grad_op_maker_) return;

  std::vector<std::unique_ptr<framework::OpDesc>> descs =
      op_info.GradOpMaker()(op_desc, no_grad_set, grad_to_var, grad_sub_block);
  for (auto& desc : descs) {
    grad_op_descs->emplace_back(desc.release());
  }
}

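// Lazily create the gradient VarBase for `var`: an FP32 LoDTensor with the
// same shape as the forward value, allocated on the forward op's device and
// zero-initialized.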
void InitGrad(VarBase* var, platform::DeviceContext* dev_ctx) {
  PADDLE_ENFORCE_NOT_NULL(var, "Could not get valid var base");
  PADDLE_ENFORCE_NOT_NULL(dev_ctx,
                          "Could not get valid device from forward op");

  if (var->grads_ == nullptr) {
    auto& var_t = var->var_->Get<framework::LoDTensor>();
    var->grads_ = new VarBase(var->GradName(), framework::proto::VarType::FP32,
                              framework::vectorize(var_t.dims()),
                              dev_ctx->GetPlace(), true, false);
    auto grad_t = var->grads_->var_->GetMutable<framework::LoDTensor>();
    operators::math::set_constant(*dev_ctx, grad_t, 0.0);
  }
}

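// Check that every input tensor lives on the same place as `place` and
// return it; mixed placements across inputs are rejected.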
platform::Place GetExpectedPlace(platform::Place place, VarBasePtrMap inputs) {
  platform::Place result = place;
  for (auto it : inputs) {
    for (VarBase* var : it.second) {
      platform::Place tmp_place =
          var->var_->Get<framework::LoDTensor>().place();
      if (!platform::is_same_place(tmp_place, result)) {
        PADDLE_THROW(
            "Input variable should keep in the same place: %s, but get place: "
            "%s of input %s instead",
            result, tmp_place, it.first);
      }
    }
  }

  return result;
}

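// Translate the imperative input map into the VariableNameMap the op registry
// expects, keyed by the input slots declared in the op proto. A slot with no
// bound VarBase must be marked dispensable.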
framework::VariableNameMap CreateInputVarNameMap(
    const OpBase* op, const VarBasePtrMap& varbase_map) {
  framework::VariableNameMap result;

  auto& info_map = framework::OpInfoMap::Instance();
  auto* op_info = info_map.GetNullable(op->Type());
  if (op_info == nullptr || op_info->proto_ == nullptr) {
    return result;
  }

  for (auto& in : op_info->Proto().inputs()) {
    auto it = varbase_map.find(in.name());
    if (it == varbase_map.end()) {
      PADDLE_ENFORCE(in.dispensable());
      result[in.name()] = {};
    } else {
      auto var_vector = it->second;
      std::vector<std::string> args;
      args.reserve(var_vector.size());
      for (VarBase* var_base : var_vector) {
        args.emplace_back(var_base->Name());
      }
      result[in.name()] = args;
    }
  }
  return result;
}

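// Mirror of CreateInputVarNameMap, applied to the output slots of the op
// proto.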
framework::VariableNameMap CreateOutputVarNameMap(
    const OpBase* op, const VarBasePtrMap& varbase_map) {
  framework::VariableNameMap result;

  auto& info_map = framework::OpInfoMap::Instance();
  auto* op_info = info_map.GetNullable(op->Type());
  if (op_info == nullptr || op_info->proto_ == nullptr) {
    return result;
  }

  for (auto& out : op_info->Proto().outputs()) {
    auto it = varbase_map.find(out.name());
    if (it == varbase_map.end()) {
      PADDLE_ENFORCE(out.dispensable());
      result[out.name()] = {};
    } else {
      auto var_vector = it->second;
      std::vector<std::string> args;
      args.reserve(var_vector.size());
      for (VarBase* var_base : var_vector) {
        args.emplace_back(var_base->Name());
      }
      result[out.name()] = args;
    }
  }
  return result;
}

Tracer::Tracer(framework::BlockDesc* root_block) : root_block_(root_block) {}

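// Trace a single forward op: run it eagerly on the expected place, wire the
// autograd graph through TrackPreOp, and, unless stop_gradient is set, build
// the grad op descs plus the VarBase lists they will read and write during
// backward. Returns the input slot names that must be kept alive for the
// backward pass.
//
// A rough usage sketch (variable names here are illustrative, not from this
// file):
//   imperative::Tracer tracer(root_block);
//   VarBasePtrMap ins{{"X", {x}}, {"Y", {y}}}, outs{{"Out", {out}}};
//   tracer.Trace(op, ins, &outs, attrs, platform::CPUPlace(),
//                /*stop_gradient=*/false);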
std::set<std::string> Tracer::Trace(OpBase* op, const VarBasePtrMap& inputs,
                                    VarBasePtrMap* outputs,
                                    framework::AttributeMap attrs_map,
                                    const platform::Place expected_place,
                                    const bool stop_gradient) {
  framework::VariableValueMap invars_map;
  framework::VariableValueMap outvars_map;

  // Construct input_vars_map and output_vars_map
  std::map<std::string, VarBase*> current_vars_map;
  op->input_vars_ = inputs;
  for (auto it : op->input_vars_) {
    auto& invars = invars_map[it.first];
    invars.reserve(it.second.size());
    for (VarBase* inp : it.second) {
      PADDLE_ENFORCE_NOT_NULL(inp->var_, "op %s's input %s is null",
                              op->Type(), inp->Name());

      invars.emplace_back(inp->var_.get());
      if (!stop_gradient) {
        current_vars_map[inp->Name()] = inp;
      }
      VLOG(3) << "input var name: " << inp->Name()
              << " inited: " << inp->var_->IsInitialized()
              << " stop_grad: " << inp->IsStopGradient();
    }
    op->TrackPreOp(it.first, it.second);
  }

  op->output_vars_ = *outputs;
  for (auto it : op->output_vars_) {
    auto& outvars = outvars_map[it.first];
    const std::vector<VarBase*>& outputs = it.second;
    outvars.reserve(outputs.size());
    for (size_t i = 0U; i < outputs.size(); ++i) {
      VarBase* out = outputs[i];
      outvars.emplace_back(out->var_.get());
      out->TrackPreOp(op, it.first, i, stop_gradient);
      if (!stop_gradient) {
        current_vars_map[out->Name()] = out;
      }

      VLOG(3) << "output var name: " << out->Name()
              << " inited: " << out->var_->IsInitialized()
              << " stop_grad: " << out->IsStopGradient();
    }
  }

  // Check attrs and create op
  framework::VariableNameMap invars_name_map =
      CreateInputVarNameMap(op, inputs);
  framework::VariableNameMap outvars_name_map =
      CreateOutputVarNameMap(op, *outputs);

  auto& info = framework::OpInfoMap::Instance().Get(op->Type());
  if (info.Checker() != nullptr) {
    info.Checker()->Check(&attrs_map);
  }

  std::unique_ptr<framework::OperatorBase> op_base =
      framework::OpRegistry::CreateOp(op->Type(), invars_name_map,
                                      outvars_name_map, attrs_map);

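  // Run the op's registered var-type inference, fed with the live imperative
  // inputs/outputs instead of the block descs used by the static graph path.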
  if (info.infer_var_type_) {
    RuntimeInferVarTypeContext infer_var_type_ctx(&inputs, outputs, &attrs_map);
    info.infer_var_type_(&infer_var_type_ctx);
  }

  // TODO(minqiyang): Support infer var type in imperative mode
  // Run forward op
  VLOG(3) << "tracer running " << op->Type();
  framework::RuntimeContext ctx(invars_map, outvars_map);

  // TODO(panyx0718): Cache p.
  framework::OperatorWithKernel* op_kernel =
      dynamic_cast<framework::OperatorWithKernel*>(op_base.get());
  PADDLE_ENFORCE_NOT_NULL(op_kernel, "only support op with kernel");

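  // The scope is a throwaway: inputs and outputs travel through the
  // RuntimeContext above; the scope only satisfies the operator interface.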
  framework::Scope scope;
  op->place_ = GetExpectedPlace(expected_place, inputs);

  PreparedOp prepared_op = PreparedOp::Prepare(ctx, *op_kernel, op->place_);
  prepared_op.op.RuntimeInferShape(scope, op->place_, ctx);
  prepared_op.func(
      framework::ExecutionContext(prepared_op.op, scope, *prepared_op.dev_ctx,
                                  prepared_op.ctx, prepared_op.kernel_configs));

  // construct backward op
  std::set<std::string> vars_saved_for_backward;
  if (!stop_gradient) {
    VLOG(5) << "start construct backward op";

    // construct grad op descs
    op->attrs_ = attrs_map;
    std::unique_ptr<framework::OpDesc> fwd_op_desc(new framework::OpDesc(
        op->Type(), invars_name_map, outvars_name_map, attrs_map));
    std::unique_ptr<std::unordered_map<std::string, std::string>> grad_to_var(
        new std::unordered_map<std::string, std::string>());
    // NOTE(minqiyang): We don't support control flow op in imperative now
    // Add grad_block_ when we want to support it
    CreateGradOp(*fwd_op_desc, {}, {}, &op->grad_op_descs_, grad_to_var.get());

    VLOG(5) << "create grad op desc: " << op->grad_op_descs_[0]->Type();

    const size_t grad_op_count = op->grad_op_descs_.size();

    op->grad_input_vars_.resize(grad_op_count);
    op->grad_output_vars_.resize(grad_op_count);

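    // For each generated grad op, bind its declared input/output names to
    // live VarBases: names found in grad_to_var are gradient variables
    // (created on demand via InitGrad), everything else is a forward input
    // or output recorded in current_vars_map.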
    for (size_t i = 0; i < grad_op_count; ++i) {
      framework::OpDesc* grad_op_desc = op->grad_op_descs_[i];
      for (auto it : grad_op_desc->Inputs()) {
        auto& grad_in_vars = op->grad_input_vars_[i][it.first];
        grad_in_vars.reserve(it.second.size());
        for (const std::string& grad_invar : it.second) {
          auto var_it = grad_to_var->find(grad_invar);
          if (var_it == grad_to_var->end()) {
            auto fwd_var_it = current_vars_map.find(grad_invar);
            PADDLE_ENFORCE(fwd_var_it != current_vars_map.end());
            // Forward inputs or outputs.
            grad_in_vars.emplace_back(fwd_var_it->second);
          } else {
            VarBase* var = current_vars_map[var_it->second];
            InitGrad(var, prepared_op.GetDeviceContext());
            // Douts: gradients of the forward outputs.
            grad_in_vars.emplace_back(var->grads_);
          }

          vars_saved_for_backward.insert(it.first);
        }
      }

      for (auto it : grad_op_desc->Outputs()) {
        auto& grad_out_vars = op->grad_output_vars_[i][it.first];
        for (const std::string& grad_outvar : it.second) {
          auto var_it = grad_to_var->find(grad_outvar);
          PADDLE_ENFORCE(var_it != grad_to_var->end(),
                         "Could not found the grad op output var, should this "
                         "operator %s's stop gradient be True",
                         op->Type());
          VarBase* var = current_vars_map[var_it->second];
          InitGrad(var, prepared_op.GetDeviceContext());
          grad_out_vars.push_back(var->grads_);
          VLOG(3) << "grads output var name: " << var->name_;
        }
      }
    }
  }

  return vars_saved_for_backward;
}

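// Trace a Python-defined op (PyLayer): run the Python forward function, wrap
// the returned variables in new VarBases, and, if gradients are required,
// register the grad inputs and outputs following PyLayer's calling
// convention. Currently CPU-only (see the TODO below).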
std::vector<VarBase*> Tracer::PyTrace(OpBase* op,
                                      const std::vector<VarBase*>& inputs,
                                      bool stop_gradient) {
  VLOG(3) << "py_trace " << op->Type();

  op->input_vars_[PyLayer::kFwdInp] = inputs;

  std::vector<std::unique_ptr<framework::Variable>> ret_vars =
      PyLayer::Apply(op->forward_id_, inputs);
  op->TrackPreOp(PyLayer::kFwdInp, inputs);

  std::vector<VarBase*>& outputs = op->output_vars_[PyLayer::kFwdOut];
  outputs.reserve(ret_vars.size());
  for (size_t i = 0U; i != ret_vars.size(); ++i) {
    VarBase* out = new VarBase(string::Sprintf("%s_out_%d", op->Type(), i),
                               std::move(ret_vars[i]), nullptr, stop_gradient);
    outputs.emplace_back(out);
    out->TrackPreOp(op, PyLayer::kFwdOut, i, stop_gradient);
  }

  if (!stop_gradient) {
    VLOG(5) << "start construct backward op";
    op->grad_input_vars_.resize(1);
    op->grad_output_vars_.resize(1);
    auto& grad_input_vars =
        op->grad_input_vars_[0][framework::GradVarName(PyLayer::kFwdInp)];
    auto& grad_output_vars =
        op->grad_output_vars_[0][framework::GradVarName(PyLayer::kFwdOut)];

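    // PyLayer's backward consumes [forward inputs, forward outputs, output
    // grads], in that order, and produces the input grads.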
    for (VarBase* inp : inputs) {
      grad_input_vars.push_back(inp);
    }
    for (VarBase* out : outputs) {
      grad_input_vars.push_back(out);
    }

    // TODO(minqiyang): Add GPU support for PyLayer, only support CPU now
    platform::CPUPlace place;
    for (VarBase* out : outputs) {
      InitGrad(out, platform::DeviceContextPool::Instance().Get(place));
      grad_input_vars.push_back(out->grads_);
    }

    for (VarBase* inp : inputs) {
      InitGrad(inp, platform::DeviceContextPool::Instance().Get(place));
      grad_output_vars.push_back(inp->grads_);
    }
  }
  return outputs;
}

}  // namespace imperative
}  // namespace paddle