// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "paddle/fluid/imperative/tracer.h"

#include <memory>
#include <set>
#include <unordered_map>
#include <unordered_set>

#include "paddle/fluid/framework/var_type_inference.h"
#include "paddle/fluid/operators/math/math_function.h"
#include "paddle/fluid/platform/device_context.h"
#include "paddle/fluid/platform/enforce.h"

namespace paddle {
namespace imperative {

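// Build the grad op descriptions for a forward op description: look up the
// op's registered GradOpMaker and fill grad_op_descs plus the mapping from
// grad var names back to their forward var names.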
void CreateGradOp(const framework::OpDesc& op_desc,
                  const std::unordered_set<std::string>& no_grad_set,
                  const std::vector<framework::BlockDesc*>& grad_sub_block,
                  std::vector<framework::OpDesc*>* grad_op_descs,
                  std::unordered_map<std::string, std::string>* grad_to_var) {
  PADDLE_ENFORCE(grad_op_descs->empty());
  const framework::OpInfo& op_info =
      framework::OpInfoMap::Instance().Get(op_desc.Type());
  if (!op_info.grad_op_maker_) return;

  std::vector<std::unique_ptr<framework::OpDesc>> descs =
      op_info.GradOpMaker()(op_desc, no_grad_set, grad_to_var, grad_sub_block);
  for (auto& desc : descs) {
    grad_op_descs->emplace_back(desc.release());
  }
}

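// Lazily create the gradient VarBase for `var`: an FP32 tensor with the same
// shape as the forward value, allocated on the forward op's device and
// zero-initialized.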
void InitGrad(VarBase* var, platform::DeviceContext* dev_ctx) {
  PADDLE_ENFORCE_NOT_NULL(var, "Could not get valid var base");
  PADDLE_ENFORCE_NOT_NULL(dev_ctx,
                          "Could not get valid device from forward op");

  if (var->grads_ == nullptr) {
    auto& var_t = var->var_->Get<framework::LoDTensor>();
    var->grads_ = new VarBase(var->GradName(), framework::proto::VarType::FP32,
                              framework::vectorize(var_t.dims()),
                              dev_ctx->GetPlace(), true, false);
    auto grad_t = var->grads_->var_->GetMutable<framework::LoDTensor>();
    operators::math::set_constant(*dev_ctx, grad_t, 0.0);
  }
}

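// All forward inputs must already live on the expected place; throw if any
// input tensor sits on a different place.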
platform::Place GetExpectedPlace(platform::Place place, VarBasePtrMap inputs) {
  platform::Place result = place;
  for (auto it : inputs) {
    for (VarBase* var : it.second) {
      platform::Place tmp_place =
          var->var_->Get<framework::LoDTensor>().place();
      if (!platform::is_same_place(tmp_place, result)) {
        PADDLE_THROW(
            "Input variable should keep in the same place: %s, but get place: "
            "%s of input %s instead",
            result, tmp_place, it.first);
      }
    }
  }

  return result;
}

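// Map each input slot declared in the op proto to the names of the VarBases
// bound to it; slots with no binding must be dispensable and stay empty.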
framework::VariableNameMap CreateInputVarNameMap(
    const OpBase* op, const VarBasePtrMap& varbase_map) {
  framework::VariableNameMap result;

  auto& info_map = framework::OpInfoMap::Instance();
  auto* op_info = info_map.GetNullable(op->Type());
  if (op_info == nullptr || op_info->proto_ == nullptr) {
    return result;
  }

  for (auto& in : op_info->Proto().inputs()) {
    auto it = varbase_map.find(in.name());
    if (it == varbase_map.end()) {
      PADDLE_ENFORCE(in.dispensable());
      result[in.name()] = {};
    } else {
      auto var_vector = it->second;
      std::vector<std::string> args;
      args.reserve(var_vector.size());
      for (VarBase* var_base : var_vector) {
        args.emplace_back(var_base->Name());
      }
      result[in.name()] = args;
    }
  }
  return result;
}

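// Same as above, but for the op's output slots.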
framework::VariableNameMap CreateOutputVarNameMap(
    const OpBase* op, const VarBasePtrMap& varbase_map) {
  framework::VariableNameMap result;

  auto& info_map = framework::OpInfoMap::Instance();
  auto* op_info = info_map.GetNullable(op->Type());
  if (op_info == nullptr || op_info->proto_ == nullptr) {
    return result;
  }

  for (auto& out : op_info->Proto().outputs()) {
    auto it = varbase_map.find(out.name());
    if (it == varbase_map.end()) {
      PADDLE_ENFORCE(out.dispensable());
      result[out.name()] = {};
    } else {
      auto var_vector = it->second;
      std::vector<std::string> args;
      args.reserve(var_vector.size());
      for (VarBase* var_base : var_vector) {
        args.emplace_back(var_base->Name());
      }
      result[out.name()] = args;
    }
  }
  return result;
}

Tracer::Tracer(framework::BlockDesc* root_block) : root_block_(root_block) {}

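// Eagerly run one forward op: bind the input/output VarBases, infer var
// types, execute the kernel on the expected place and, unless stop_gradient
// is set, record the grad op descs and the variables the backward pass needs.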
std::set<std::string> Tracer::Trace(OpBase* op, const VarBasePtrMap& inputs,
                                    VarBasePtrMap* outputs,
                                    framework::AttributeMap attrs_map,
                                    const platform::Place expected_place,
                                    const bool stop_gradient) {
  framework::VariableValueMap invars_map;
  framework::VariableValueMap outvars_map;

  // Construct input_vars_map and output_vars_map
  std::map<std::string, VarBase*> current_vars_map;
  op->input_vars_ = inputs;
  for (auto it : op->input_vars_) {
    auto& invars = invars_map[it.first];
    invars.reserve(it.second.size());
    for (VarBase* inp : it.second) {
      PADDLE_ENFORCE_NOT_NULL(inp->var_, "op %s input %s nullptr", op->Type(),
                              inp->Name());

      invars.emplace_back(inp->var_);
      if (!stop_gradient) {
        current_vars_map[inp->Name()] = inp;
      }
      VLOG(3) << "input var name: " << inp->Name()
              << " inited: " << inp->var_->IsInitialized()
              << " stop_grad: " << inp->IsStopGradient();
    }
    op->TrackPreOp(it.first, it.second);
  }

  op->output_vars_ = *outputs;
  for (auto it : op->output_vars_) {
    auto& outvars = outvars_map[it.first];
    const std::vector<VarBase*>& outputs = it.second;
    outvars.reserve(outputs.size());
    for (size_t i = 0U; i < outputs.size(); ++i) {
      VarBase* out = outputs[i];
      outvars.emplace_back(out->var_);
      out->TrackPreOp(op, it.first, i, stop_gradient);
      if (!stop_gradient) {
        current_vars_map[out->Name()] = out;
      }

      VLOG(3) << "output var name: " << out->Name()
              << " inited: " << out->var_->IsInitialized()
              << " stop_grad: " << out->IsStopGradient();
    }
  }

  // Check attrs and create op
  framework::VariableNameMap invars_name_map =
      CreateInputVarNameMap(op, inputs);
  framework::VariableNameMap outvars_name_map =
      CreateOutputVarNameMap(op, *outputs);

  auto& info = framework::OpInfoMap::Instance().Get(op->Type());
  if (info.Checker() != nullptr) {
    info.Checker()->Check(&attrs_map);
  }

  std::unique_ptr<framework::OperatorBase> op_base =
      framework::OpRegistry::CreateOp(op->Type(), invars_name_map,
                                      outvars_name_map, attrs_map);

  if (info.infer_var_type_) {
    RuntimeInferVarTypeContext infer_var_type_ctx(&inputs, outputs, &attrs_map);
    info.infer_var_type_(&infer_var_type_ctx);
  }

  // TODO(minqiyang): Support infer var type in imperative mode
  // Run forward op
  VLOG(3) << "tracer running " << op->Type();
  framework::RuntimeContext ctx(invars_map, outvars_map);

  // TODO(panyx0718): Cache p.
  framework::OperatorWithKernel* op_kernel =
      dynamic_cast<framework::OperatorWithKernel*>(op_base.get());
  PADDLE_ENFORCE_NOT_NULL(op_kernel, "only support op with kernel");

  framework::Scope scope;
  op->place_ = GetExpectedPlace(expected_place, inputs);

  PreparedOp prepared_op = PreparedOp::Prepare(ctx, *op_kernel, op->place_);
  prepared_op.op.RuntimeInferShape(scope, op->place_, ctx);
  prepared_op.func(
      framework::ExecutionContext(prepared_op.op, scope, *prepared_op.dev_ctx,
                                  prepared_op.ctx, prepared_op.kernel_configs));

  // construct backward op
  std::set<std::string> vars_saved_for_backward;
  if (!stop_gradient) {
    VLOG(5) << "start construct backward op";

    // construct grad op descs
    op->attrs_ = attrs_map;
    std::unique_ptr<framework::OpDesc> fwd_op_desc(new framework::OpDesc(
        op->Type(), invars_name_map, outvars_name_map, attrs_map));
    std::unique_ptr<std::unordered_map<std::string, std::string>> grad_to_var(
        new std::unordered_map<std::string, std::string>());
    // NOTE(minqiyang): We don't support control flow op in imperative now
    // Add grad_block_ when we want to support it
    CreateGradOp(*fwd_op_desc, {}, {}, &op->grad_op_descs_, grad_to_var.get());

    VLOG(5) << "create grad op desc: " << op->grad_op_descs_[0]->Type();

    const size_t grad_op_count = op->grad_op_descs_.size();

    op->grad_input_vars_.resize(grad_op_count);
    op->grad_output_vars_.resize(grad_op_count);

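    // Bind each grad op's input slots either to forward VarBases (for forward
    // inputs/outputs) or to the grads_ of forward variables (for douts), and
    // its output slots to the grads_ of the corresponding forward variables.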
    for (size_t i = 0; i < grad_op_count; ++i) {
      framework::OpDesc* grad_op_desc = op->grad_op_descs_[i];
      for (auto it : grad_op_desc->Inputs()) {
        auto& grad_in_vars = op->grad_input_vars_[i][it.first];
        grad_in_vars.reserve(it.second.size());
        for (const std::string& grad_invar : it.second) {
          auto var_it = grad_to_var->find(grad_invar);
          if (var_it == grad_to_var->end()) {
            auto fwd_var_it = current_vars_map.find(grad_invar);
            PADDLE_ENFORCE(fwd_var_it != current_vars_map.end());
            // Forward inputs or outputs.
            grad_in_vars.emplace_back(fwd_var_it->second);
          } else {
            VarBase* var = current_vars_map[var_it->second];
            InitGrad(var, prepared_op.GetDeviceContext());
            // Douts.
            grad_in_vars.emplace_back(var->grads_);
          }

          vars_saved_for_backward.insert(it.first);
        }
      }

      for (auto it : grad_op_desc->Outputs()) {
        auto& grad_out_vars = op->grad_output_vars_[i][it.first];
        for (const std::string& grad_outvar : it.second) {
          auto var_it = grad_to_var->find(grad_outvar);
          PADDLE_ENFORCE(var_it != grad_to_var->end(),
                         "Could not found the grad op output var, should this "
                         "operator %s's stop gradient be True",
                         op->Type());
          VarBase* var = current_vars_map[var_it->second];
          InitGrad(var, prepared_op.GetDeviceContext());
          grad_out_vars.push_back(var->grads_);
          VLOG(3) << "grads output var name: " << var->name_;
        }
      }
    }
  }

  return vars_saved_for_backward;
}

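// Trace a PyLayer op: run the Python-side forward function on the inputs,
// wrap the returned variables as VarBases, and record the bookkeeping needed
// for backward.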
std::vector<VarBase*> Tracer::PyTrace(OpBase* op,
                                      const std::vector<VarBase*>& inputs,
                                      bool stop_gradient) {
  VLOG(3) << "py_trace " << op->Type();

  op->input_vars_[PyLayer::kFwdInp] = inputs;

  std::vector<framework::Variable*> ret_vars =
      PyLayer::Apply(op->forward_id_, inputs);

  op->TrackPreOp(PyLayer::kFwdInp, inputs);

  std::vector<VarBase*>& outputs = op->output_vars_[PyLayer::kFwdOut];
  outputs.reserve(ret_vars.size());
  for (size_t i = 0U; i != ret_vars.size(); ++i) {
    framework::Variable* v = ret_vars[i];
    VarBase* out = new VarBase(string::Sprintf("%s_out_%d", op->Type(), i), v,
                               nullptr, stop_gradient);
    outputs.emplace_back(out);
    out->TrackPreOp(op, PyLayer::kFwdOut, i, stop_gradient);
  }

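  // The PyLayer backward takes the forward inputs, the forward outputs and
  // the output grads as its inputs, and produces grads for the forward
  // inputs. PyLayer currently runs its backward on CPU only.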
  if (!stop_gradient) {
    VLOG(5) << "start construct backward op";
    op->grad_input_vars_.resize(1);
    op->grad_output_vars_.resize(1);
    auto& grad_input_vars =
        op->grad_input_vars_[0][framework::GradVarName(PyLayer::kFwdInp)];
    auto& grad_output_vars =
        op->grad_output_vars_[0][framework::GradVarName(PyLayer::kFwdOut)];

    for (VarBase* inp : inputs) {
      grad_input_vars.push_back(inp);
    }
    for (VarBase* out : outputs) {
      grad_input_vars.push_back(out);
    }

    // TODO(minqiyang): Add GPU support for PyLayer, only support CPU now
    platform::CPUPlace place;
    for (VarBase* out : outputs) {
      InitGrad(out, platform::DeviceContextPool::Instance().Get(place));
      grad_input_vars.push_back(out->grads_);
    }

    for (VarBase* inp : inputs) {
      InitGrad(inp, platform::DeviceContextPool::Instance().Get(place));
      grad_output_vars.push_back(inp->grads_);
    }
  }
  return outputs;
}

}  // namespace imperative
}  // namespace paddle