/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/framework/executor.h"
#include <deque>
#include <memory>
#include <unordered_map>
#include <unordered_set>
#include <utility>

#include "paddle/fluid/framework/feed_fetch_method.h"
#include "paddle/fluid/framework/lod_rank_table.h"
#include "paddle/fluid/framework/lod_tensor_array.h"
#include "paddle/fluid/framework/op_registry.h"
#include "paddle/fluid/framework/reader.h"
#include "paddle/fluid/framework/threadpool.h"
#include "paddle/fluid/framework/transfer_scope_cache.h"
#include "paddle/fluid/framework/variable_helper.h"
#include "paddle/fluid/operators/controlflow/while_op_helper.h"
#include "paddle/fluid/operators/distributed/distributed.h"
#include "paddle/fluid/platform/place.h"
#include "paddle/fluid/platform/profiler.h"

#ifdef PADDLE_WITH_NGRAPH
#include "paddle/fluid/operators/ngraph/ngraph_engine.h"
#endif

DECLARE_bool(benchmark);
DEFINE_bool(use_mkldnn, false, "Use MKLDNN to run");
DEFINE_bool(use_ngraph, false, "Use NGRAPH to run");

namespace paddle {
namespace framework {
namespace {
// Block ids start from 0, so -1 is used to represent the pseudo code block
// that wraps the whole program (i.e. block 0).
int kProgramId = -1;
}  // namespace

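// Count how many times each non-persistable LOD_TENSOR, SELECTED_ROWS or
// LOD_TENSOR_ARRAY variable in the block is referenced as an op input or
// output, skipping the variables in skip_var_list. These counts are the basis
// for eager deletion (garbage collection) of intermediate tensors.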
static std::unordered_map<std::string, size_t> GetNonPersistableReferenceCounts(
    const BlockDesc& block, const std::vector<std::string>& skip_var_list) {
  std::unordered_map<std::string, size_t> ref_cnts;
  std::unordered_set<std::string> skip_vars(skip_var_list.begin(),
                                            skip_var_list.end());

  auto update_ref_cnts = [&](OpDesc* op_desc, const VariableNameMap& name_map) {
    for (auto& name_pair : name_map) {
      for (auto& name : name_pair.second) {
        if (skip_vars.count(name)) continue;
        auto* var_desc = block.FindVar(name);
        if (var_desc == nullptr || var_desc->Persistable()) continue;
        auto type = var_desc->Proto()->type().type();
        if (type != proto::VarType::LOD_TENSOR &&
            type != proto::VarType::SELECTED_ROWS &&
            type != proto::VarType::LOD_TENSOR_ARRAY) {
          continue;
        }
        ++ref_cnts[name];
      }
    }
  };

  for (auto op_desc : block.AllOps()) {
    update_ref_cnts(op_desc, op_desc->Inputs());
    update_ref_cnts(op_desc, op_desc->Outputs());
  }
  return ref_cnts;
}

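// Pre-compute the reference counts of the block's variables when eager
// deletion is enabled (GetEagerDeletionThreshold() >= 0) and not forcibly
// disabled.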
ExecutorPrepareContext::ExecutorPrepareContext(
    const framework::ProgramDesc& prog, size_t block_id,
    const std::vector<std::string>& keep_vars, bool force_disable_gc)
    : prog_(prog), block_id_(block_id), force_disable_gc_(force_disable_gc) {
  if (GetEagerDeletionThreshold() >= 0 && !force_disable_gc_) {
    global_ref_cnts_ =
        GetNonPersistableReferenceCounts(prog.Block(block_id), keep_vars);
  }
}

ExecutorPrepareContext::~ExecutorPrepareContext() {
  VLOG(5) << "destroy ExecutorPrepareContext";
}

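// Decrement the reference counts of the op's input and output variables and
// hand the memory of variables whose count drops to zero over to the garbage
// collector.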
static void DeleteUnusedTensors(
    const Scope& scope, const OperatorBase* op, GarbageCollector* gc,
    std::unordered_map<std::string, size_t>* ref_cnts) {
  std::deque<std::shared_ptr<memory::Allocation>> garbages;

  auto handler = [&](const VariableNameMap& name_map) {
    for (auto& name_pair : name_map) {
      for (auto& name : name_pair.second) {
        auto it = ref_cnts->find(name);
        if (it == ref_cnts->end()) continue;
        if (--(it->second) != 0) {
          continue;
        }
        auto* var = scope.FindVar(name);
        if (var == nullptr) {
          continue;
        }

        VLOG(2) << "Erase variable " << name;
        if (var->IsType<LoDTensor>()) {
          garbages.emplace_back(
              var->GetMutable<LoDTensor>()->MoveMemoryHolder());
        } else if (var->IsType<SelectedRows>()) {
          garbages.emplace_back(var->GetMutable<SelectedRows>()
                                    ->mutable_value()
                                    ->MoveMemoryHolder());
        } else if (var->IsType<LoDTensorArray>()) {
          auto* lod_tensor_arr = var->GetMutable<LoDTensorArray>();
          for (auto& t : *lod_tensor_arr) {
            garbages.emplace_back(t.MoveMemoryHolder());
          }
        } else {
          PADDLE_THROW("Type %s of %s is not supported eager deletion",
S
sneaxiy 已提交
128
                       framework::ToTypeName(var->Type()), name);
S
sneaxiy 已提交
129 130 131 132 133 134 135 136
        }
      }
    }
  };

  handler(op->Inputs());
  handler(op->Outputs());

  if (!garbages.empty()) {
    gc->Add(std::move(garbages));
  }
}

Executor::Executor(const platform::Place& place) : place_(place) {}

void Executor::Close() {
#ifdef PADDLE_WITH_DISTRIBUTE
  // TODO(typhoonzero): the "complete" message will need to use the real
  // trainer_id instead of the hard-coded 0.
  auto client =
      paddle::operators::distributed::RPCClient::GetInstance<RPCCLIENT_T>(0);
  client->SendComplete();
#endif
}

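// Create the variables of the given block: persistable variables are created
// in the outermost ancestor scope so they survive across runs, while
// non-persistable variables are created in the local scope.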
void Executor::CreateVariables(const ProgramDesc& pdesc, Scope* scope,
                               int block_id) {
  auto& global_block = pdesc.Block(block_id);

  const Scope* ancestor_scope = scope;
  while (ancestor_scope->parent()) {
    ancestor_scope = ancestor_scope->parent();
  }

  if (ancestor_scope != scope) {
    for (auto& var : global_block.AllVars()) {
      if (var->Name() == framework::kEmptyVarName) {
        continue;
      }

      if (var->Persistable()) {
        auto* ptr = const_cast<Scope*>(ancestor_scope)->Var(var->Name());
        InitializeVariable(ptr, var->GetType());
        VLOG(3) << "Create Variable " << var->Name()
                << " global, which pointer is " << ptr;
      } else {
        auto* ptr = scope->Var(var->Name());
        InitializeVariable(ptr, var->GetType());
        VLOG(3) << "Create Variable " << var->Name()
                << " locally, which pointer is " << ptr;
      }
    }
  } else {
    for (auto& var : global_block.AllVars()) {
      auto* ptr = scope->Var(var->Name());
      InitializeVariable(ptr, var->GetType());
      VLOG(3) << "Create variable " << var->Name() << ", which pointer is "
              << ptr;
    }
  }
}

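// A minimal usage sketch (illustrative only; `program` and `scope` are assumed
// to exist, and the trailing arguments rely on the defaults declared in
// executor.h):
//
//   Executor exe(platform::CPUPlace());
//   exe.Run(program, &scope, /*block_id=*/0,
//           /*create_local_scope=*/true, /*create_vars=*/true);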
void Executor::Run(const ProgramDesc& pdesc, Scope* scope, int block_id,
                   bool create_local_scope, bool create_vars,
                   const std::vector<std::string>& skip_ref_cnt_vars,
                   bool force_disable_gc) {
  platform::RecordBlock b(block_id);
  if (FLAGS_use_mkldnn) EnableMKLDNN(pdesc);
#ifdef PADDLE_WITH_NGRAPH
  if (FLAGS_use_ngraph) operators::NgraphEngine::EnableNgraph(pdesc);
#endif
  auto ctx = Prepare(pdesc, block_id, skip_ref_cnt_vars, force_disable_gc);
  RunPreparedContext(ctx.get(), scope, create_local_scope, create_vars);
}

// Check whether the block already has feed operators and feed_holder.
// Return false if the block does not have any feed operators.
// If some feed operators have been prepended to the block, check that
// the info contained in these feed operators matches the feed_targets
// and feed_holder_name. Raise exception when any mismatch is found.
// Return true if the block has feed operators and holder of matching info.
static bool has_feed_operators(
    const BlockDesc& block,
    const std::map<std::string, const LoDTensor*>& feed_targets,
    const std::string& feed_holder_name) {
  size_t feed_count = 0;
  for (auto* op : block.AllOps()) {
    if (op->Type() == kFeedOpType) {
      feed_count++;
      // The input variable's name of feed_op should be feed_holder_name.
      PADDLE_ENFORCE_EQ(op->Input("X")[0], feed_holder_name,
                        "Input to feed op should be '%s'", feed_holder_name);
      std::string feed_target_name = op->Output("Out")[0];
      PADDLE_ENFORCE(
          feed_targets.find(feed_target_name) != feed_targets.end(),
          "Feed operator output name '%s' cannot be found in 'feed_targets'",
          feed_target_name);
    }
  }

  if (feed_count > 0) {
    PADDLE_ENFORCE_EQ(
        feed_count, feed_targets.size(),
        "The number of feed operators should match 'feed_targets'");

    if (!feed_holder_name.empty()) {
      // When feed operators are present, so should be the feed_holder.
      auto var = block.FindVar(feed_holder_name);
      PADDLE_ENFORCE_NOT_NULL(var, "Block should already have a '%s' variable",
                              feed_holder_name);
      PADDLE_ENFORCE_EQ(var->GetType(), proto::VarType::FEED_MINIBATCH,
                        "'%s' variable should be 'FEED_MINIBATCH' type",
                        feed_holder_name);
    }
  }

  return feed_count > 0;
}

// Check whether the block already has fetch operators and fetch_holder.
// Return false if the block does not have any fetch operators.
// If some fetch operators have been appended to the block, check that
// the info contained in these fetch operators matches the fetch_targets
// and fetch_holder_name. Raise exception when any mismatch is found.
// Return true if the block has fetch operators and holder of matching info.
static bool has_fetch_operators(
    const BlockDesc& block,
    const std::map<std::string, LoDTensor*>& fetch_targets,
    const std::string& fetch_holder_name) {
  size_t fetch_count = 0;
  for (auto* op : block.AllOps()) {
    if (op->Type() == kFetchOpType) {
      fetch_count++;
      // The output variable's name of fetch_op should be fetch_holder_name.
      PADDLE_ENFORCE_EQ(op->Output("Out")[0], fetch_holder_name,
                        "Output of fetch op should be '%s'", fetch_holder_name);
      std::string fetch_target_name = op->Input("X")[0];
      PADDLE_ENFORCE(
          fetch_targets.find(fetch_target_name) != fetch_targets.end(),
          "Fetch operator input name '%s' cannot be found in 'fetch_targets'",
          fetch_target_name);
    }
  }

  if (fetch_count > 0) {
    PADDLE_ENFORCE_EQ(
        fetch_count, fetch_targets.size(),
        "The number of fetch operators should match 'fetch_targets'");

    if (!fetch_holder_name.empty()) {
      // When fetch operators are present, so should be the fetch_holder.
      auto var = block.FindVar(fetch_holder_name);
      PADDLE_ENFORCE_NOT_NULL(var, "Block should already have a '%s' variable",
                              fetch_holder_name);
      PADDLE_ENFORCE_EQ(var->GetType(), proto::VarType::FETCH_LIST,
                        "'%s' variable should be 'FETCH_LIST' type",
                        fetch_holder_name);
    }
  }

  return fetch_count > 0;
}

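// This overload feeds and fetches by name: if block 0 does not already contain
// matching feed/fetch operators, the program is copied and feed ops are
// prepended (and fetch ops appended) before the prepared context is run.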
void Executor::Run(const ProgramDesc& program, Scope* scope,
                   std::map<std::string, const LoDTensor*>* feed_targets,
                   std::map<std::string, LoDTensor*>* fetch_targets,
                   bool create_local_scope, bool create_vars,
                   const std::string& feed_holder_name,
                   const std::string& fetch_holder_name) {
  platform::RecordBlock b(kProgramId);
  if (FLAGS_use_mkldnn) EnableMKLDNN(program);
  bool has_feed_ops =
      has_feed_operators(program.Block(0), *feed_targets, feed_holder_name);
  bool has_fetch_ops =
      has_fetch_operators(program.Block(0), *fetch_targets, fetch_holder_name);

  ProgramDesc* copy_program = const_cast<ProgramDesc*>(&program);
  std::unique_ptr<ProgramDesc> unique_ptr_of_copy_program;
  if (!has_feed_ops || !has_fetch_ops) {
    unique_ptr_of_copy_program.reset(new ProgramDesc(program));
    copy_program = unique_ptr_of_copy_program.get();
  }
  auto* global_block = copy_program->MutableBlock(0);

  if (!has_feed_ops) {
    // create feed_holder variable
    auto* feed_holder = global_block->Var(feed_holder_name);
    feed_holder->SetType(proto::VarType::FEED_MINIBATCH);
    feed_holder->SetPersistable(true);

    int i = 0;
    for (auto& feed_target : (*feed_targets)) {
      std::string var_name = feed_target.first;
      VLOG(3) << "feed target's name: " << var_name;

      // prepend feed op
      auto* op = global_block->PrependOp();
      op->SetType(kFeedOpType);
      op->SetInput("X", {feed_holder_name});
      op->SetOutput("Out", {var_name});
      op->SetAttr("col", {static_cast<int>(i)});
      op->CheckAttrs();

      i++;
    }
  }

  if (!has_fetch_ops) {
    // create fetch_holder variable
    auto* fetch_holder = global_block->Var(fetch_holder_name);
    fetch_holder->SetType(proto::VarType::FETCH_LIST);
    fetch_holder->SetPersistable(true);

    int i = 0;
    for (auto& fetch_target : (*fetch_targets)) {
      std::string var_name = fetch_target.first;
      VLOG(3) << "fetch target's name: " << var_name;

      // append fetch op
      auto* op = global_block->AppendOp();
      op->SetType(kFetchOpType);
      op->SetInput("X", {var_name});
      op->SetOutput("Out", {fetch_holder_name});
      op->SetAttr("col", {static_cast<int>(i)});
      op->CheckAttrs();

      i++;
    }
  }

  auto ctx = Prepare(*copy_program, 0);
  RunPreparedContext(ctx.get(), scope, feed_targets, fetch_targets,
                     create_local_scope, create_vars, feed_holder_name,
                     fetch_holder_name);
}

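// Build an ExecutorPrepareContext for one block: instantiate an operator for
// every OpDesc in the block so that RunPreparedContext can reuse them.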
std::unique_ptr<ExecutorPrepareContext> Executor::Prepare(
    const ProgramDesc& program, int block_id,
    const std::vector<std::string>& skip_ref_cnt_vars, bool force_disable_gc) {
  std::unique_ptr<ExecutorPrepareContext> ctx(new ExecutorPrepareContext(
      program, block_id, skip_ref_cnt_vars, force_disable_gc));
  PADDLE_ENFORCE_LT(static_cast<size_t>(block_id), program.Size());
  auto& block = program.Block(block_id);
  for (auto& op_desc : block.AllOps()) {
    ctx->ops_.push_back(OpRegistry::CreateOp(*op_desc));
  }
  return ctx;
}

std::vector<std::shared_ptr<ExecutorPrepareContext>> Executor::Prepare(
    const ProgramDesc& program, const std::vector<int>& block_ids,
    const std::vector<std::vector<std::string>>& skip_ref_cnt_vars,
    bool force_disable_gc) {
  PADDLE_ENFORCE(
      skip_ref_cnt_vars.empty() || skip_ref_cnt_vars.size() == block_ids.size(),
      "skip_ref_cnt_vars should be either empty or equals to block number %d",
      block_ids.size());
  std::vector<std::shared_ptr<ExecutorPrepareContext>> result;
  size_t idx = 0;
  for (auto& bid : block_ids) {
    ExecutorPrepareContext* ctx;
    if (skip_ref_cnt_vars.empty()) {
      ctx = new ExecutorPrepareContext(program, bid, std::vector<std::string>(),
                                       force_disable_gc);
    } else {
      ctx = new ExecutorPrepareContext(program, bid, skip_ref_cnt_vars[idx],
                                       force_disable_gc);
    }
    PADDLE_ENFORCE_LT(static_cast<size_t>(bid), program.Size());
    auto& block = program.Block(bid);
    for (auto& op_desc : block.AllOps()) {
      ctx->ops_.push_back(OpRegistry::CreateOp(*op_desc));
    }
    result.push_back(std::shared_ptr<ExecutorPrepareContext>(ctx));
    ++idx;
  }
  return result;
}

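// Run the prepared operators one by one, optionally inside a newly created
// local scope, deleting unused tensors eagerly after each op when garbage
// collection is enabled.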
void Executor::RunPreparedContext(ExecutorPrepareContext* ctx, Scope* scope,
                                  bool create_local_scope, bool create_vars,
                                  bool keep_kids) {
  PADDLE_ENFORCE_NOT_NULL(scope);
  Scope* local_scope = scope;
  if (create_vars) {
    if (create_local_scope) {
      local_scope = &scope->NewScope();
    }
    CreateVariables(ctx->prog_, local_scope, ctx->block_id_);
  }

  int64_t max_memory_size = GetEagerDeletionThreshold();
  std::unique_ptr<GarbageCollector> gc;
  // FIXME(zjl): recurrent_op is rather complex, so we forcibly disable
  // gc in recurrent_op.
  if (!ctx->force_disable_gc_ && max_memory_size >= 0) {
    ctx->ResetReferenceCount();
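    // Pick a garbage collector that matches the place: a fast (unsafe) or
    // default-stream collector on GPU, a plain collector on CPU.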
#ifdef PADDLE_WITH_CUDA
    if (platform::is_gpu_place(place_)) {
      if (IsFastEagerDeletionModeEnabled()) {
        gc.reset(new UnsafeFastGPUGarbageCollector(
            boost::get<platform::CUDAPlace>(place_), max_memory_size));
      } else {
        gc.reset(new DefaultStreamGarbageCollector(
            boost::get<platform::CUDAPlace>(place_), max_memory_size));
      }
    } else if (platform::is_cpu_place(place_)) {
#endif
      gc.reset(new CPUGarbageCollector(boost::get<platform::CPUPlace>(place_),
                                       max_memory_size));
#ifdef PADDLE_WITH_CUDA
    }
#endif
    // If gc is enabled and the program has more than one block
    if (gc && ctx->prog_.Size() > 1) {
      operators::PrepareSafeEagerDeletionOnWhileOpAndWhileGradOp(ctx->block_id_,
                                                                 ctx->ops_);
    }
  }

  for (auto& op : ctx->ops_) {
    op->Run(*local_scope, place_);

    if (gc) {
      DeleteUnusedTensors(*local_scope, op.get(), gc.get(),
                          &(ctx->runtime_ref_cnts_));
    }
  }

  platform::DeviceContextPool::Instance().Get(place_)->Wait();

  if (local_scope != scope) {
    scope->DeleteScope(local_scope);
  } else {
    if (!keep_kids) {
      // By default, we should delete all kid scopes after the executor runs,
      // because some operators, such as while_op, may create local scopes when
      // running. But when while_op also creates a local executor to run its
      // sub-block, the sub scopes it created should not be dropped
      // immediately, because while_grad_op will use some variables created
      // during the while_op run, so we need to keep the kids and wait for the
      // outer executor to drop them.
      scope->DropKids();
    }
  }
}

void Executor::RunPreparedContext(
    ExecutorPrepareContext* ctx, Scope* scope,
    std::map<std::string, const LoDTensor*>* feed_targets,
    std::map<std::string, LoDTensor*>* fetch_targets, bool create_local_scope,
    bool create_vars, const std::string& feed_holder_name,
    const std::string& fetch_holder_name) {
  auto& global_block = ctx->prog_.Block(ctx->block_id_);

  PADDLE_ENFORCE(
      has_feed_operators(global_block, *feed_targets, feed_holder_name),
      "Program in ExecutorPrepareContext should has feed_ops.");
  PADDLE_ENFORCE(
      has_fetch_operators(global_block, *fetch_targets, fetch_holder_name),
      "Program in the prepared context should has fetch_ops.");

  // map the data of feed_targets to feed_holder
  for (auto* op : global_block.AllOps()) {
    if (op->Type() == kFeedOpType) {
      std::string feed_target_name = op->Output("Out")[0];
      int idx = boost::get<int>(op->GetAttr("col"));
      SetFeedVariable(scope, *(*feed_targets)[feed_target_name],
                      feed_holder_name, idx);
    }
  }

  RunPreparedContext(ctx, scope, create_local_scope, create_vars);

  // obtain the data of fetch_targets from fetch_holder
  for (auto* op : global_block.AllOps()) {
    if (op->Type() == kFetchOpType) {
      std::string fetch_target_name = op->Input("X")[0];
      int idx = boost::get<int>(op->GetAttr("col"));
      *(*fetch_targets)[fetch_target_name] =
          GetFetchVariable(*scope, fetch_holder_name, idx);
    }
  }
}

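// Turn on the use_mkldnn attribute for every operator in the program that
// declares it (only effective when built with PADDLE_WITH_MKLDNN).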
void Executor::EnableMKLDNN(const ProgramDesc& program) {
#ifdef PADDLE_WITH_MKLDNN
  VLOG(3) << "use_mkldnn=True";
  for (size_t bid = 0; bid < program.Size(); ++bid) {
    auto* block = const_cast<ProgramDesc&>(program).MutableBlock(bid);
    for (auto* op : block->AllOps()) {
      if (op->HasAttr("use_mkldnn")) {
        op->SetAttr("use_mkldnn", true);
      }
    }
  }
#else
  LOG(WARNING) << "'MKLDNN' is not supported; please re-compile with the "
                  "WITH_MKLDNN option";
#endif
}
}  // namespace framework
}  // namespace paddle