// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#pragma once

#include <cstdint>
#include <map>     // NOLINT
#include <memory>  // NOLINT
#include <mutex>   // NOLINT
#include <set>
#include <string>         // NOLINT
#include <unordered_map>  // NOLINT
#include <utility>
#include <vector>  // NOLINT

// clang-format off
#include "paddle/fluid/framework/python_headers.h"
// clang-format on

#include "paddle/fluid/framework/op_desc.h"
#include "paddle/fluid/framework/operator.h"
#include "paddle/fluid/framework/var_desc.h"
#include "paddle/fluid/framework/var_type_inference.h"
#include "paddle/fluid/platform/enforce.h"
#include "paddle/fluid/platform/device_context.h"
#include "paddle/fluid/operators/math/math_function.h"
#include "paddle/fluid/imperative/backward_strategy.h"
#include "paddle/fluid/imperative/type_defs.h"
#include "paddle/fluid/imperative/flags.h"

namespace paddle {
namespace imperative {

class VarBase;

namespace py = ::pybind11;

class PreparedOp {
 public:
  PreparedOp(const framework::OperatorBase& op,
             const framework::RuntimeContext& ctx,
             framework::OperatorWithKernel::OpKernelFunc func,
             platform::DeviceContext* dev_ctx,
             std::vector<framework::KernelConfig>* kernel_configs)
      : op(op),
        ctx(ctx),
        func(func),
        dev_ctx(dev_ctx),
        kernel_configs(kernel_configs) {}

  static PreparedOp Prepare(const framework::RuntimeContext& ctx,
                            const framework::OperatorWithKernel& op,
                            const platform::Place& place) {
    platform::DeviceContextPool& pool = platform::DeviceContextPool::Instance();
    auto* dev_ctx = pool.Get(place);

    // Check whether a kernel is registered for this op type.
    auto& all_op_kernels = op.AllOpKernels();
    auto kernels_iter = all_op_kernels.find(op.Type());
    if (kernels_iter == all_op_kernels.end()) {
      PADDLE_THROW(
          "There are no kernels registered for the %s operator.",
          op.Type());
    }

    framework::OperatorWithKernel::OpKernelMap& kernels = kernels_iter->second;

    auto expected_kernel_key =
        op.GetExpectedKernelType(framework::ExecutionContext(
            op, framework::Scope(), *dev_ctx, ctx, nullptr));
    VLOG(3) << "expected_kernel_key:" << expected_kernel_key;

    auto kernel_iter = kernels.find(expected_kernel_key);
#ifdef PADDLE_WITH_MKLDNN
    // workaround for missing MKLDNN kernel when FLAGS_use_mkldnn env var is set
    if (kernel_iter == kernels.end() &&
        expected_kernel_key.library_type_ == framework::LibraryType::kMKLDNN) {
      VLOG(3) << "missing MKLDNN kernel: fallbacking to PLAIN one";
      expected_kernel_key.library_type_ = framework::LibraryType::kPlain;
      expected_kernel_key.data_layout_ = framework::DataLayout::kAnyLayout;
      kernel_iter = kernels.find(expected_kernel_key);
    }
#endif
    if (kernel_iter == kernels.end()) {
      PADDLE_THROW("op %s does not have kernel for %s", op.Type(),
                   KernelTypeToString(expected_kernel_key));
    }
    std::vector<framework::KernelConfig>* kernel_configs =
        op.GetKernelConfig(expected_kernel_key);

    return PreparedOp(op, ctx, kernel_iter->second, dev_ctx, kernel_configs);
  }

  inline platform::DeviceContext* GetDeviceContext() const { return dev_ctx; }

  const framework::OperatorBase& op;
  const framework::RuntimeContext& ctx;
  framework::OperatorWithKernel::OpKernelFunc func;
  platform::DeviceContext* dev_ctx;
  std::vector<framework::KernelConfig>* kernel_configs;
};
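
// A minimal usage sketch (hypothetical tracer-side code, not the actual
// tracer implementation): prepare the kernel of a traced op and invoke it
// through the returned functor. `ins`, `outs` and `place` are placeholders.
//
//   framework::RuntimeContext run_ctx(ins, outs);
//   auto prepared = PreparedOp::Prepare(run_ctx, *op_with_kernel, place);
//   framework::Scope scope;
//   prepared.func(framework::ExecutionContext(prepared.op, scope,
//                                             *prepared.GetDeviceContext(),
//                                             prepared.ctx,
//                                             prepared.kernel_configs));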

class OpBase;

class ThreadSafeNameSet {
 public:
  void Insert(const std::string& name);

  void Remove(const std::string& name);

  std::vector<std::string> Names() const;

 private:
  std::multiset<std::string> set_;
  mutable std::mutex mtx_;
};
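
// A minimal sketch of how this set supports leak debugging (assuming the
// flag behind IsDebugEnabled() from imperative/flags.h is turned on): every
// live VarBase registers its name here, so leftover names can be dumped.
//
//   if (IsDebugEnabled()) {
//     for (const auto& name : VarBase::AliveVarNames()) {
//       VLOG(5) << "alive var: " << name;
//     }
//   }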

/* The wrapper for Variable, which holds a Variable and a VarBase of its
 * gradient. This object should be managed entirely by the Python interpreter.
 *
 * Nearly all interfaces should be implemented in C++.
 */
class VarBase {
 public:
  static std::vector<std::string> AliveVarNames();

  // Internal interface: create a VarBase from an existing variable.
  VarBase(const std::string& name, std::unique_ptr<framework::Variable> var,
          VarBase* grad, bool stop_gradient)
      : VarBase(name, var->Get<framework::LoDTensor>().type(),
                var->Get<framework::LoDTensor>().dims(),
                var->Get<framework::LoDTensor>().place(), nullptr, grad,
                stop_gradient, false, true) {
    var_ = std::move(var);
  }

  // Python interface
  VarBase(const std::string& name, const framework::proto::VarType::Type dtype,
          const std::vector<int64_t>& shape, const platform::Place& place,
          bool stop_gradient, bool persistable)
      : VarBase(name, dtype, framework::make_ddim(shape), place, stop_gradient,
                persistable) {}

  // Internal interface: create a VarBase with a given ddim.
  VarBase(const std::string& name, const framework::proto::VarType::Type dtype,
          const framework::DDim& shape, const platform::Place& place,
          bool stop_gradient, bool persistable)
      : VarBase(name, dtype, shape, place, nullptr, nullptr, stop_gradient,
                persistable, true) {}

  // Constructor used for grad vars.
  VarBase(const std::string& name, const framework::proto::VarType::Type dtype,
          const std::vector<int64_t>& shape, const platform::Place& place,
          bool stop_gradient, bool persistable, bool need_initialize)
      : VarBase(name, dtype, framework::make_ddim(shape), place, nullptr,
                nullptr, stop_gradient, persistable, need_initialize) {}

 private:
  // TODO(minqiyang): need to support SelectedRows
  VarBase(const std::string& name, framework::proto::VarType::Type dtype,
          const framework::DDim& shape, const platform::Place& place,
          std::unique_ptr<framework::Variable> var, VarBase* grad,
          bool stop_gradient, bool persistable, bool need_initialize)
      : name_(name),
        type_(framework::proto::VarType::LOD_TENSOR),
        place_(place),
        var_(std::move(var)),
        grads_(grad),
        dtype_(dtype),
        stop_gradient_(stop_gradient),
        persistable_(persistable),
        pre_op_(nullptr),
        pre_op_out_name_(),
        pre_op_out_idx_(-1) {
    if (!var_) {
      var_.reset(new framework::Variable());
    }

    auto tensor = var_->GetMutable<framework::LoDTensor>();
    tensor->Resize(shape);
    if (need_initialize) {
      tensor->mutable_data(place, dtype);
      is_initialized_ = true;
      VLOG(8) << "initialized varbase: " << name_ << " type: " << dtype
              << " place: " << place;
    } else {
      is_initialized_ = false;
      VLOG(8) << "not initialized varbase: " << name_;
    }
    VLOG(8) << "create varbase: " << name_ << " type: " << dtype
            << " place: " << place << " stop gradient: " << stop_gradient_;

    if (IsDebugEnabled()) {
      name_set_.Insert(name_);
    }
  }

 public:
  virtual ~VarBase() {
    pre_op_ = nullptr;
    pre_op_out_idx_ = -1;
    VLOG(8) << "destruct varbase: " << name_;
    if (IsDebugEnabled()) {
      name_set_.Remove(name_);
    }
  }

  inline void SetName(const std::string& name) { name_ = name; }
  inline std::string Name() const { return name_; }
  inline bool IsInitialize() const { return is_initialized_; }
  inline void SetInitialize(bool inited) { is_initialized_ = inited; }
  inline std::vector<int64_t> Shape() const {
    if (var_->IsInitialized()) {
      return framework::vectorize(var_->Get<framework::LoDTensor>().dims());
    } else {
      return {};
    }
  }

  inline framework::DDim Dims() const {
    return var_->Get<framework::LoDTensor>().dims();
  }

  // data type, e.g. FP32
  inline void SetDataType(framework::proto::VarType::Type type) {
    auto tensor = var_->GetMutable<framework::LoDTensor>();
    tensor->mutable_data(tensor->place(), type);
  }
  inline framework::proto::VarType::Type DataType() const { return dtype_; }

  // tensor type, e.g. LoDTensor
  inline void SetType(framework::proto::VarType::Type type) { type_ = type; }
  inline framework::proto::VarType::Type Type() const { return type_; }

  inline void SetStopGradient(bool stop_gradient) {
    stop_gradient_ = stop_gradient;
    if (grads_) {
      grads_->stop_gradient_ = stop_gradient;
    }
  }
  inline bool IsStopGradient() const { return stop_gradient_; }

  inline void SetPersistable(bool persistable) { persistable_ = persistable; }
  inline bool IsPersistable() const { return persistable_; }
  inline void SetPreOp(OpBase* op) { pre_op_ = op; }
  inline platform::Place GetPlace() { return place_; }
  inline OpBase* PreOp() const { return pre_op_; }
  inline int PreOpOutIdx() const { return pre_op_out_idx_; }

  void RunBackward(const detail::BackwardStrategy& bck_strategy);

  inline void ResetPreOp(OpBase* op) {
    if (op == pre_op_) {
      // clear pre_op info when op equals the var's pre_op
      pre_op_ = nullptr;
      pre_op_out_idx_ = -1;
    }
  }

  void InitBuffer() {
    if (!is_initialized_) {
      var_->GetMutable<framework::LoDTensor>()->mutable_data(place_, dtype_);
      is_initialized_ = true;
      VLOG(8) << "initialized varbase: " << name_ << " type: " << dtype_
              << " place: " << place_;
    } else {
      VLOG(8) << "var: " << name_ << " has already been initialized ";
    }
  }

  void TrackPreOp(OpBase* pre_op, const std::string& pre_op_out_name,
                  int pre_op_out_idx, bool pre_op_stop_gradient) {
    pre_op_ = pre_op;
    pre_op_out_name_ = pre_op_out_name;
    pre_op_out_idx_ = pre_op_out_idx;
    if (pre_op_stop_gradient) {
      stop_gradient_ = pre_op_stop_gradient;
    }
  }

  void ClearGradient() {
    VLOG(1) << "clear gradient of " << Name();
    if (grads_ && grads_->var_ && grads_->var_->IsInitialized()) {
      auto grads_t = grads_->var_->GetMutable<framework::LoDTensor>();
      operators::math::set_constant(
          *(platform::DeviceContextPool::Instance().Get(
              grads_->var_->Get<framework::LoDTensor>().place())),
          grads_t, 0.0);
    }
  }

  framework::LoDTensor& GradValue();

  std::unique_ptr<VarBase> NewVarBase(const platform::Place& dst_place,
                                      const bool blocking) const;

  inline std::string GradName() const {
    return string::Sprintf("%s@IGrad", Name());
  }

  std::string name_;
  framework::proto::VarType::Type type_;
  platform::Place place_;

  std::unique_ptr<framework::Variable> var_;
  std::shared_ptr<VarBase> grads_;

 private:
  framework::proto::VarType::Type dtype_;
  bool stop_gradient_;
  bool persistable_;
  bool is_initialized_;
  OpBase* pre_op_;
  std::string pre_op_out_name_;
  int pre_op_out_idx_;

  // A private name set used to check for memory leaks
  static ThreadSafeNameSet name_set_;
};
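
// A minimal lifecycle sketch (illustrative only; names, dtype and shape are
// made up, and the ops in between would normally be recorded by the tracer):
//
//   auto x = std::make_shared<VarBase>(
//       "x", framework::proto::VarType::FP32, std::vector<int64_t>{2, 3},
//       platform::CPUPlace(), /*stop_gradient=*/false, /*persistable=*/false);
//   // ... run traced ops that produce a `loss` VarBase ...
//   detail::BackwardStrategy strategy;
//   loss->RunBackward(strategy);  // accumulate gradients into grads_
//   x->ClearGradient();           // zero the gradient buffer before reuse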

/* The wrapper for OpDesc, which holds an OpDesc and an OpDesc of its
 * gradient. This object should be managed entirely by the Python interpreter.
 */
class PYBIND11_HIDDEN OpBase {
 public:
  OpBase(const std::string& type)
      : type_(type),
        trace_id_(-1),
        place_(platform::CPUPlace()),
        backward_hooks_() {}

  virtual ~OpBase() {
    for (const auto& it : outputs_ref) {
      auto vb = it.lock();
      if (vb) {
        VLOG(3) << "Op reset by " << vb->name_;
        vb->ResetPreOp(this);
      }
    }
    // TODO(minqiyang): remove op_desc from block_desc in tracer
    // release resource
    for (framework::OpDesc* desc : grad_op_descs_) {
      delete desc;
    }
  }

  std::vector<VarBasePtrMap> ApplyGrad(
      BackwardSumMap* bck_map, GradientRef* grad_ref,
      const detail::BackwardStrategy& bck_strategy);

  inline std::string Type() const { return type_; }
  inline std::string GradOpType(size_t index) const {
    PADDLE_ENFORCE_NOT_NULL(grad_op_descs_[index]);
    return grad_op_descs_[index]->Type();
  }

  void RegisterBackwardHooks(const py::object& callable);

  void InvokeBackwardHooks();

  void TrackPreOp(
      const std::string& inp_name,
      const std::vector<std::shared_ptr<imperative::VarBase>>& inputs) {
    auto& pre_ops_list = pre_ops_[inp_name];
    pre_ops_list.reserve(inputs.size());
    auto& pre_ops_out_idx_list = pre_ops_out_idx_[inp_name];
    for (std::shared_ptr<imperative::VarBase> inp_var : inputs) {
      if (inp_var->PreOp() && !inp_var->IsStopGradient()) {
        VLOG(3) << "add pre op " << inp_var->PreOp()->Type() << " in slot "
                << inp_name;
        pre_ops_list.emplace_back(inp_var->PreOp());
        pre_ops_out_idx_list.push_back(inp_var->PreOpOutIdx());
      } else {
        VLOG(3) << "no pre op in slot " << inp_name
                << " input var stop_gradient: " << inp_var->IsStopGradient();
        pre_ops_list.emplace_back(nullptr);
        // pre_ops_out_idx_list.push_back(-1);
      }
    }
  }

  std::string type_;
  int trace_id_;

  // Note: each fwd op corresponds to a vector of bwd ops.
  std::vector<framework::OpDesc*> grad_op_descs_;

  platform::Place place_;

  OpBasePtrMap pre_ops_;
  std::map<std::string, std::vector<int>> pre_ops_out_idx_;

  VarBaseWeakPtrList outputs_ref;
  // Inputs to a vector of bwd ops.
  std::vector<VarBasePtrMap> grad_input_vars_;
  // Outputs to a vector of bwd ops.
  std::vector<VarBasePtrMap> grad_output_vars_;

  std::vector<py::object> backward_hooks_;

  framework::AttributeMap attrs_;
};
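
// A rough sketch of how a tracer might wire one forward op into the autograd
// graph (hypothetical; the real bookkeeping lives in the tracer):
//
//   OpBase* op = new OpBase("mul");
//   op->TrackPreOp("X", {x});  // remember which ops produced the inputs
//   op->TrackPreOp("Y", {y});
//   out->TrackPreOp(op, "Out", 0, /*pre_op_stop_gradient=*/false);
//   op->outputs_ref.emplace_back(out);  // weak ref, released in ~OpBase()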

class Layer {
 public:
  virtual ~Layer() {}

  virtual std::vector<std::shared_ptr<VarBase>> Forward(
      const std::vector<std::shared_ptr<VarBase>>& inputs) {
    std::vector<std::shared_ptr<VarBase>> vars;
    return vars;
  }
};
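
// Layer is meant to be subclassed (usually from Python via pybind); a
// minimal C++ sketch of an identity layer, for illustration only:
//
//   class IdentityLayer : public Layer {
//    public:
//     std::vector<std::shared_ptr<VarBase>> Forward(
//         const std::vector<std::shared_ptr<VarBase>>& inputs) override {
//       return inputs;
//     }
//   };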

// infer var type context for imperative mode
class PYBIND11_HIDDEN RuntimeInferVarTypeContext
    : public framework::InferVarTypeContext {
 public:
  RuntimeInferVarTypeContext(const imperative::VarBasePtrMap* inputs,
                             imperative::VarBasePtrMap* outputs,
                             const framework::AttributeMap* attrs_map)
      : InferVarTypeContext(nullptr, nullptr),
        inputs_(inputs),
        outputs_(outputs),
        attrs_(attrs_map),
        input_names_(),
        output_names_(),
        var_set_() {
    input_names_.reserve(inputs_->size());
    for (auto& it : *inputs_) {
      for (std::shared_ptr<imperative::VarBase> var : it.second) {
        input_names_[it.first].emplace_back(var->Name());
        var_set_[var->Name()] = var;
      }
    }

    output_names_.reserve(outputs_->size());
    for (auto& it : *outputs_) {
      for (std::shared_ptr<imperative::VarBase> var : it.second) {
        output_names_[it.first].emplace_back(var->Name());
        var_set_[var->Name()] = var;
      }
    }
  }

  virtual ~RuntimeInferVarTypeContext() {}

  framework::Attribute GetAttr(const std::string& name) const override {
    PADDLE_ENFORCE_NOT_NULL(attrs_);
    return attrs_->at(name);
  }

  bool HasVar(const std::string& name) const override {
    return var_set_.count(name) > 0;
  }

  bool HasInput(const std::string& name) const override {
    PADDLE_ENFORCE_NOT_NULL(inputs_);
    return inputs_->count(name) > 0;
  }

  bool HasOutput(const std::string& name) const override {
    PADDLE_ENFORCE_NOT_NULL(outputs_);
    return outputs_->count(name) > 0;
  }

  const std::vector<std::string>& Input(
      const std::string& name) const override {
    return input_names_.at(name);
  }

  const std::vector<std::string>& Output(
      const std::string& name) const override {
    return output_names_.at(name);
  }

  framework::proto::VarType::Type GetType(
      const std::string& name) const override {
    return var_set_.at(name)->Type();
  }

  void SetType(const std::string& name,
               framework::proto::VarType::Type type) override {
    if (name == "kLookupTablePath") {
      VLOG(2) << "SUPER UGLY FIX, remove this when imperative mode is moved to C++";
    } else {
      var_set_[name]->SetType(type);
    }
  }

  framework::proto::VarType::Type GetDataType(
      const std::string& name) const override {
    return var_set_.at(name)->DataType();
  }

  void SetDataType(const std::string& name,
                   framework::proto::VarType::Type type) override {
    var_set_[name]->SetDataType(type);
  }

  std::vector<framework::proto::VarType::Type> GetDataTypes(
      const std::string& name) const override {
    PADDLE_THROW("GetDataTypes is not supported in runtime InferVarType");
  }

  void SetDataTypes(const std::string& name,
                    const std::vector<framework::proto::VarType::Type>&
                        multiple_data_type) override {
    PADDLE_THROW("SetDataTypes is not supported in runtime InferVarType");
  }

  std::vector<int64_t> GetShape(const std::string& name) const override {
    PADDLE_THROW("Do not handle Shape in runtime InferVarType");
  }

  void SetShape(const std::string& name,
                const std::vector<int64_t>& dims) override {
    PADDLE_THROW("Do not handle Shape in runtime InferVarType");
  }

  int32_t GetLoDLevel(const std::string& name) const override {
    PADDLE_THROW("Do not handle LoDLevel in runtime InferVarType");
  }

  void SetLoDLevel(const std::string& name, int32_t lod_level) override {
    PADDLE_THROW("Do not handle LoDLevel in runtime InferVarType");
  }

 private:
  const imperative::VarBasePtrMap* inputs_;
  imperative::VarBasePtrMap* outputs_;
  const framework::AttributeMap* attrs_;
  std::unordered_map<std::string, std::vector<std::string>> input_names_;
  std::unordered_map<std::string, std::vector<std::string>> output_names_;
  std::unordered_map<std::string, std::shared_ptr<imperative::VarBase>>
      var_set_;
};
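
// A minimal sketch of running an op's var-type inference over traced
// variables (illustrative; assumes the usual OpInfoMap registration):
//
//   RuntimeInferVarTypeContext infer_ctx(&inputs, &outputs, &attrs);
//   const auto& info = framework::OpInfoMap::Instance().Get(op->Type());
//   if (info.infer_var_type_) {
//     info.infer_var_type_(&infer_ctx);  // fills output Type()/DataType()
//   }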

}  // namespace imperative
}  // namespace paddle