// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "paddle/fluid/imperative/tracer.h"

#include <memory>
#include <set>
#include <unordered_map>
#include <unordered_set>

#include "paddle/fluid/operators/math/math_function.h"
#include "paddle/fluid/platform/device_context.h"
#include "paddle/fluid/platform/enforce.h"

namespace paddle {
namespace imperative {

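// Creates the grad op descs for `op_desc` through the registered GradOpMaker
// and releases their ownership into *grad_op_descs.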
void CreateGradOp(const framework::OpDesc& op_desc,
                  const std::unordered_set<std::string>& no_grad_set,
                  const std::vector<framework::BlockDesc*>& grad_sub_block,
                  std::vector<framework::OpDesc*>* grad_op_descs,
                  std::unordered_map<std::string, std::string>* grad_to_var) {
  PADDLE_ENFORCE(grad_op_descs->empty());
  std::vector<std::unique_ptr<framework::OpDesc>> descs =
      framework::OpInfoMap::Instance()
          .Get(op_desc.Type())
          .GradOpMaker()(op_desc, no_grad_set, grad_to_var, grad_sub_block);

  for (auto& desc : descs) {
    grad_op_descs->emplace_back(desc.release());
  }
}

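// Lazily creates var->grads_ as a zero-filled FP32 tensor with the same
// shape as the forward variable, placed on the forward op's device.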
void InitGrad(VarBase* var, platform::DeviceContext* dev_ctx) {
  PADDLE_ENFORCE_NOT_NULL(var, "Could not get valid var base");
  PADDLE_ENFORCE_NOT_NULL(dev_ctx,
                          "Could not get valid device from forward op");

  if (var->grads_ == nullptr) {
    auto& var_t = var->var_->Get<framework::LoDTensor>();
    var->grads_ = new VarBase(var->GradName(), framework::proto::VarType::FP32,
                              framework::vectorize(var_t.dims()),
                              dev_ctx->GetPlace(), true, false);
    auto grad_t = var->grads_->var_->GetMutable<framework::LoDTensor>();
    operators::math::set_constant(*dev_ctx, grad_t, 0.0);
  }
}

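// Checks that every input tensor already lives in the expected place and
// returns that place; throws if any input is somewhere else.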
platform::Place GetExpectedPlace(platform::Place place, VarBasePtrMap inputs) {
  platform::Place result = place;
  for (auto it : inputs) {
    for (VarBase* var : it.second) {
      platform::Place tmp_place =
          var->var_->Get<framework::LoDTensor>().place();
      if (!platform::is_same_place(tmp_place, result)) {
        PADDLE_THROW(
            "All input variables should be in the same place: %s, but got "
            "place %s for input %s",
            result, tmp_place, it.first);
      }
    }
  }

  return result;
}

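// Maps every input slot declared in the op proto to the names of the
// VarBases bound to it; a slot with no binding must be dispensable.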
framework::VariableNameMap CreateInputVarNameMap(
    const OpBase* op, const VarBasePtrMap& varbase_map) {
  framework::VariableNameMap result;

  auto& info_map = framework::OpInfoMap::Instance();
  auto* op_info = info_map.GetNullable(op->Type());
  if (op_info == nullptr || op_info->proto_ == nullptr) {
    return result;
  }

  for (auto& in : op_info->Proto().inputs()) {
    auto it = varbase_map.find(in.name());
    if (it == varbase_map.end()) {
      PADDLE_ENFORCE(in.dispensable());
      result[in.name()] = {};
    } else {
      auto var_vector = it->second;
      std::vector<std::string> args;
      args.reserve(var_vector.size());
      for (VarBase* var_base : var_vector) {
        args.emplace_back(var_base->Name());
      }
      result[in.name()] = args;
    }
  }
  return result;
}

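// The output-side counterpart of CreateInputVarNameMap.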
framework::VariableNameMap CreateOutputVarNameMap(
    const OpBase* op, const VarBasePtrMap& varbase_map) {
  framework::VariableNameMap result;

  auto& info_map = framework::OpInfoMap::Instance();
  auto* op_info = info_map.GetNullable(op->Type());
  if (op_info == nullptr || op_info->proto_ == nullptr) {
    return result;
  }

  for (auto& out : op_info->Proto().outputs()) {
    auto it = varbase_map.find(out.name());
    if (it == varbase_map.end()) {
      PADDLE_ENFORCE(out.dispensable());
      result[out.name()] = {};
    } else {
      auto var_vector = it->second;
      std::vector<std::string> args;
      args.reserve(var_vector.size());
      for (VarBase* var_base : var_vector) {
        args.emplace_back(var_base->Name());
      }
      result[out.name()] = args;
    }
  }
  return result;
}

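// Tracer records each op as it executes so that, unless stop_gradient is
// set, the matching grad ops can be constructed for the backward pass.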
Tracer::Tracer(framework::BlockDesc* root_block) : root_block_(root_block) {}

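// Runs `op` forward on `expected_place` and, if gradients are required,
// creates its grad op descs and binds their inputs/outputs to forward
// variables or to lazily initialized grad variables. Returns the set of
// slot names that must be saved for backward.
//
// A minimal call sketch (the op, vars and attrs are illustrative, not
// defined in this file):
//   std::set<std::string> saved =
//       tracer.Trace(mul_op, {{"X", {x}}, {"Y", {y}}}, {{"Out", {out}}},
//                    attrs, platform::CPUPlace(), /*stop_gradient=*/false);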
std::set<std::string> Tracer::Trace(OpBase* op, const VarBasePtrMap& inputs,
                                    const VarBasePtrMap& outputs,
                                    framework::AttributeMap attrs_map,
                                    const platform::Place expected_place,
                                    const bool stop_gradient) {
  framework::VariableValueMap invars_map;
  framework::VariableValueMap outvars_map;

  // Construct invars_map, outvars_map and current_vars_map
  std::map<std::string, VarBase*> current_vars_map;
  op->input_vars_ = inputs;
  for (auto it : op->input_vars_) {
    auto& invars = invars_map[it.first];
    invars.reserve(it.second.size());
    for (VarBase* inp : it.second) {
      PADDLE_ENFORCE_NOT_NULL(inp->var_, "op %s input %s is nullptr",
                              op->Type(), inp->Name());

      invars.emplace_back(inp->var_);
      if (!stop_gradient) {
        current_vars_map[inp->Name()] = inp;
      }
      VLOG(3) << "input var name: " << inp->Name()
              << " inited: " << inp->var_->IsInitialized()
              << " stop_grad: " << inp->IsStopGradient();
    }
    op->TrackPreOp(it.first, it.second);
  }

  op->output_vars_ = outputs;
  for (auto it : op->output_vars_) {
    auto& outvars = outvars_map[it.first];
    const std::vector<VarBase*>& outputs = it.second;
    outvars.reserve(outputs.size());
    for (size_t i = 0U; i < outputs.size(); ++i) {
      VarBase* out = outputs[i];
      outvars.emplace_back(out->var_);
      out->TrackPreOp(op, it.first, i, stop_gradient);
      if (!stop_gradient) {
        current_vars_map[out->Name()] = out;
      }

      VLOG(3) << "output var name: " << out->Name()
              << " inited: " << out->var_->IsInitialized()
              << " stop_grad: " << out->IsStopGradient();
    }
  }

  // Check attrs and create op
  framework::VariableNameMap invars_name_map =
      CreateInputVarNameMap(op, inputs);
  framework::VariableNameMap outvars_name_map =
      CreateOutputVarNameMap(op, outputs);

  auto& info = framework::OpInfoMap::Instance().Get(op->Type());
  if (info.Checker() != nullptr) {
    info.Checker()->Check(&attrs_map);
  }

  std::unique_ptr<framework::OperatorBase> op_base =
      framework::OpRegistry::CreateOp(op->Type(), invars_name_map,
                                      outvars_name_map, attrs_map);

  // TODO(minqiyang): Support infer var type in imperative mode
  // Run forward op
  VLOG(3) << "tracer running " << op->Type();
  framework::RuntimeContext ctx(invars_map, outvars_map);

  // TODO(panyx0718): Cache p.
  framework::OperatorWithKernel* op_kernel =
      dynamic_cast<framework::OperatorWithKernel*>(op_base.get());
  PADDLE_ENFORCE_NOT_NULL(op_kernel, "only support op with kernel");

  framework::Scope scope;
  op->place_ = GetExpectedPlace(expected_place, inputs);
  PreparedOp prepared_op = PreparedOp::Prepare(ctx, *op_kernel, op->place_);
  prepared_op.op.RuntimeInferShape(scope, op->place_, ctx);
  prepared_op.func(
      framework::ExecutionContext(prepared_op.op, scope, *prepared_op.dev_ctx,
                                  prepared_op.ctx, prepared_op.kernel_configs));

  // construct backward op
  std::set<std::string> vars_saved_for_backward;
  if (!stop_gradient) {
    VLOG(5) << "start construct backward op";

    // construct grad op descs
    std::unique_ptr<framework::OpDesc> fwd_op_desc(new framework::OpDesc(
        op->Type(), invars_name_map, outvars_name_map, attrs_map));
    std::unique_ptr<std::unordered_map<std::string, std::string>> grad_to_var(
        new std::unordered_map<std::string, std::string>());
    // NOTE(minqiyang): We don't support control flow op in imperative now
    // Add grad_block_ when we want to support it
    CreateGradOp(*fwd_op_desc, {}, {}, &op->grad_op_descs_, grad_to_var.get());

    VLOG(5) << "create grad op desc: " << op->grad_op_descs_[0]->Type();

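    // For each grad op, bind every declared grad input either to a forward
    // variable or to the (lazily initialized) grad of a traced variable.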
    const size_t grad_op_count = op->grad_op_descs_.size();

    op->grad_input_vars_.resize(grad_op_count);
    op->grad_output_vars_.resize(grad_op_count);

    for (size_t i = 0; i < grad_op_count; ++i) {
      framework::OpDesc* grad_op_desc = op->grad_op_descs_[i];
      for (auto it : grad_op_desc->Inputs()) {
        auto& grad_in_vars = op->grad_input_vars_[i][it.first];
        grad_in_vars.reserve(it.second.size());
        for (const std::string& grad_invar : it.second) {
          auto var_it = grad_to_var->find(grad_invar);
          if (var_it == grad_to_var->end()) {
            auto fwd_var_it = current_vars_map.find(grad_invar);
            PADDLE_ENFORCE(fwd_var_it != current_vars_map.end());
            // Forward inputs or outputs.
            grad_in_vars.emplace_back(fwd_var_it->second->var_);
          } else {
            VarBase* var = current_vars_map[var_it->second];
            InitGrad(var, prepared_op.GetDeviceContext());
            // Douts.
            grad_in_vars.emplace_back(var->grads_->var_);
          }

          vars_saved_for_backward.insert(it.first);
        }
      }

      for (auto it : grad_op_desc->Outputs()) {
        auto& grad_out_vars = op->grad_output_vars_[i][it.first];
        for (const std::string& grad_outvar : it.second) {
          auto var_it = grad_to_var->find(grad_outvar);
          PADDLE_ENFORCE(var_it != grad_to_var->end(),
                         "Could not find the grad op output var; should this "
                         "operator %s's stop_gradient be True?",
                         op->Type());
          VarBase* var = current_vars_map[var_it->second];
          InitGrad(var, prepared_op.GetDeviceContext());
          grad_out_vars.push_back(var->grads_->var_);
        }
      }
    }
  }

  return vars_saved_for_backward;
}

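// Traces a Python-defined layer: runs its forward through PyLayer::Apply,
// wraps the returned variables in VarBases, and (if gradients are required)
// records the variables the Python backward will consume and produce.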
std::vector<VarBase*> Tracer::PyTrace(OpBase* op,
                                      const std::vector<VarBase*>& inputs,
                                      bool stop_gradient) {
  VLOG(3) << "py_trace " << op->Type();

  op->input_vars_[PyLayer::kFwdInp] = inputs;

  std::vector<framework::Variable*> ret_vars =
      PyLayer::Apply(op->forward_id_, inputs);

  op->TrackPreOp(PyLayer::kFwdInp, inputs);

  std::vector<VarBase*>& outputs = op->output_vars_[PyLayer::kFwdOut];
  outputs.reserve(ret_vars.size());
  for (size_t i = 0U; i != ret_vars.size(); ++i) {
    framework::Variable* v = ret_vars[i];
    VarBase* out = new VarBase(string::Sprintf("%s_out_%d", op->Type(), i), v,
                               nullptr, stop_gradient);
    outputs.emplace_back(out);
    out->TrackPreOp(op, PyLayer::kFwdOut, i, stop_gradient);
  }

  if (!stop_gradient) {
    VLOG(5) << "start construct backward op";
    op->grad_input_vars_.resize(1);
    op->grad_output_vars_.resize(1);
    auto& grad_input_vars =
        op->grad_input_vars_[0][framework::GradVarName(PyLayer::kFwdInp)];
    auto& grad_output_vars =
        op->grad_output_vars_[0][framework::GradVarName(PyLayer::kFwdOut)];

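    // The Python backward consumes the forward inputs, the forward outputs
    // and the grads of the outputs, and produces the grads of the inputs.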
    for (const VarBase* inp : inputs) {
      grad_input_vars.push_back(inp->var_);
    }
    for (VarBase* out : outputs) {
      grad_input_vars.push_back(out->var_);
    }

    // TODO(minqiyang): Add GPU support for PyLayer; only CPU is supported now
    platform::CPUPlace place;
    for (VarBase* out : outputs) {
      InitGrad(out, platform::DeviceContextPool::Instance().Get(place));
      grad_input_vars.push_back(out->grads_->var_);
    }

    for (VarBase* inp : inputs) {
      InitGrad(inp, platform::DeviceContextPool::Instance().Get(place));
      grad_output_vars.push_back(inp->grads_->var_);
    }
  }
  return outputs;
}

}  // namespace imperative
}  // namespace paddle