/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/framework/executor.h"
#include <deque>
#include <memory>
#include <unordered_map>
#include <unordered_set>
#include <utility>
#include "google/protobuf/io/zero_copy_stream_impl.h"
#include "google/protobuf/message.h"
#include "google/protobuf/text_format.h"
#include "paddle/fluid/framework/feed_fetch_method.h"
#include "paddle/fluid/framework/lod_rank_table.h"
#include "paddle/fluid/framework/lod_tensor_array.h"
#include "paddle/fluid/framework/op_registry.h"
#include "paddle/fluid/framework/reader.h"
#include "paddle/fluid/framework/trainer_desc.pb.h"
#include "paddle/fluid/framework/trainer_factory.h"
#include "paddle/fluid/framework/transfer_scope_cache.h"
#include "paddle/fluid/framework/variable_helper.h"
#include "paddle/fluid/operators/controlflow/conditional_block_op_helper.h"
#include "paddle/fluid/operators/controlflow/recurrent_op_helper.h"
#include "paddle/fluid/operators/controlflow/while_op_helper.h"
#include "paddle/fluid/operators/distributed/distributed.h"
#include "paddle/fluid/platform/place.h"
#include "paddle/fluid/platform/profiler.h"

#ifdef PADDLE_WITH_NGRAPH
#include "paddle/fluid/operators/ngraph/ngraph_engine.h"
#endif

DECLARE_bool(benchmark);
DEFINE_bool(use_mkldnn, false, "Use MKLDNN to run");
DEFINE_bool(use_ngraph, false, "Use NGRAPH to run");

namespace paddle {
namespace framework {
namespace {
// Block ids start from 0. kProgramId (-1) represents the pseudo code block
// that wraps the whole program, including block 0.
int kProgramId = -1;
}  // namespace

ExecutorPrepareContext::ExecutorPrepareContext(
    const framework::ProgramDesc& prog, size_t block_id)
    : prog_(prog), block_id_(block_id) {}

void ExecutorPrepareContext::PrepareUnusedVars(
    const std::vector<std::string>& keep_vars, bool force_disable_gc) {
#ifdef PADDLE_WITH_NGRAPH
  if (FLAGS_use_ngraph) {
    // FIXME(zjl): Results differ when ngraph and gc are both enabled in
    // unittests. I do not know why this happens. Maybe the ngraph engine
    // caches some variables?
    LOG_FIRST_N(WARNING, 1)
        << "FLAGS_use_ngraph=True, garbage collection strategy is "
           "disabled in Executor";
    force_disable_gc = true;
  }
#endif
  force_disable_gc_ = force_disable_gc;
  if (GetEagerDeletionThreshold() < 0 || force_disable_gc_) {
    return;
  }

  // If gc is enabled and the program has more than one block
  if (prog_.Size() > 1) {
    operators::PrepareSafeEagerDeletionOnConditionalOpAndConditionalGradOp(
        prog_, block_id_, ops_);
    operators::PrepareSafeEagerDeletionOnWhileOpAndWhileGradOp(prog_, block_id_,
                                                               ops_);
    operators::PrepareSafeEagerDeletionOnRecurrentOpAndRecurrentGradOp(
        prog_, block_id_, ops_);
  }
  unused_vars_ = GetUnusedVars(prog_.Block(block_id_), ops_, keep_vars);
}

ExecutorPrepareContext::~ExecutorPrepareContext() {
  VLOG(5) << "destroy ExecutorPrepareContext";
}

Executor::Executor(const platform::Place& place) : place_(place) {}

Executor::~Executor() {
#ifdef PADDLE_WITH_MKLDNN
  // Clear the mkl-dnn cache, unless explicitly marked not to do so (as set
  // in the constructor); this is needed to keep the mkl-dnn unit tests
  // working.
  if (platform::is_cpu_place(place_)) {
    platform::DeviceContextPool& pool = platform::DeviceContextPool::Instance();
    platform::MKLDNNDeviceContext* dev_ctx =
        static_cast<platform::MKLDNNDeviceContext*>(pool.Get(place_));
    dev_ctx->ResetBlobMap();
  }
#endif
}

void Executor::Close() {
#ifdef PADDLE_WITH_DISTRIBUTE
  // TODO(typhoonzero): the complete message will need to use the real
  // trainer_id instead of 0.
  auto client =
      paddle::operators::distributed::RPCClient::GetInstance<RPCCLIENT_T>(0);
  client->SendComplete();
#endif
}

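// Creates all variables of block `block_id` in `pdesc`. Persistable
// variables are created in the outermost ancestor of `scope` so that they
// outlive a single run; non-persistable variables are created in `scope`
// itself.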
void Executor::CreateVariables(const ProgramDesc& pdesc, Scope* scope,
                               int block_id) {
  auto& global_block = pdesc.Block(block_id);

  const Scope* ancestor_scope = scope;
  while (ancestor_scope->parent()) {
    ancestor_scope = ancestor_scope->parent();
  }

  if (ancestor_scope != scope) {
    for (auto& var : global_block.AllVars()) {
      if (var->Name() == framework::kEmptyVarName) {
        continue;
      }

      if (var->Persistable()) {
        auto* ptr = const_cast<Scope*>(ancestor_scope)->Var(var->Name());
        InitializeVariable(ptr, var->GetType());
        VLOG(3) << "Create Variable " << var->Name()
                << " global, whose pointer is " << ptr;
      } else {
        auto* ptr = scope->Var(var->Name());
        InitializeVariable(ptr, var->GetType());
        VLOG(3) << "Create Variable " << var->Name()
                << " locally, whose pointer is " << ptr;
      }
    }
  } else {
    for (auto& var : global_block.AllVars()) {
      auto* ptr = scope->Var(var->Name());
      InitializeVariable(ptr, var->GetType());
      VLOG(3) << "Create variable " << var->Name() << ", whose pointer is "
              << ptr;
    }
  }
}

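// A minimal sketch of the dataset-training lifecycle (hypothetical names;
// `trainer_desc_str` is assumed to be a serialized TrainerDesc proto):
//
//   Executor exe(place);
//   auto trainer =
//       exe.InitForDataset(program, trainer_desc_str, &scope, dataset);
//   exe.RunFromDataset(trainer);
//   exe.ReleaseTrainer(trainer);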
std::shared_ptr<TrainerBase> Executor::InitForDataset(
    const ProgramDesc& main_program, const std::string& trainer_desc_str,
    Scope* scope, Dataset* dataset) {
  VLOG(3) << "Start to RunFromDataset in executor";
  TrainerDesc trainer_desc;
  bool success = trainer_desc.ParseFromString(trainer_desc_str);
  PADDLE_ENFORCE_EQ(success, true,
                    "Failed to parse TrainerDesc from string:\n%s",
                    trainer_desc_str.c_str());
  VLOG(3) << "Going to create trainer, trainer class is "
          << trainer_desc.class_name();
  std::shared_ptr<TrainerBase> trainer;
  trainer = TrainerFactory::CreateTrainer(trainer_desc.class_name());
  // initialize trainer
  VLOG(3) << "Going to initialize trainer";
  trainer->Initialize(trainer_desc, dataset);
  VLOG(3) << "Set root scope here";
  trainer->SetScope(scope);
  // prepare training environment and helper environment
  VLOG(3) << "Try to init train environment";
  trainer->InitTrainerEnv(main_program, place_);
  VLOG(3) << "Try to init other environment";
  trainer->InitOtherEnv(main_program);
  return trainer;
}

void Executor::RunFromDataset(std::shared_ptr<TrainerBase> trainer) {
  PADDLE_ENFORCE_NE(trainer, nullptr,
                    "Trainer is nullptr, invoke InitForDataset first");
  // run training; the trainer is finalized later in ReleaseTrainer
  VLOG(3) << "Trainer starts to run";
  trainer->Run();
}

void Executor::ReleaseTrainer(std::shared_ptr<TrainerBase> trainer) {
  VLOG(3) << "Trainer going to finalize";
  trainer->Finalize();
}

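// A minimal usage sketch for the block-level Run, assuming the default
// arguments declared in executor.h:
//
//   Executor exe(platform::CPUPlace());
//   Scope scope;
//   exe.Run(program, &scope, /*block_id=*/0);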
void Executor::Run(const ProgramDesc& pdesc, Scope* scope, int block_id,
                   bool create_local_scope, bool create_vars,
                   const std::vector<std::string>& skip_ref_cnt_vars,
                   bool force_disable_gc) {
  platform::RecordBlock b(block_id);
  if (FLAGS_use_mkldnn) EnableMKLDNN(pdesc);
  auto ctx = Prepare(pdesc, block_id, skip_ref_cnt_vars, force_disable_gc);
  RunPreparedContext(ctx.get(), scope, create_local_scope, create_vars);
}

// Check whether the block already has feed operators and feed_holder.
// Return false if the block does not have any feed operators.
// If some feed operators have been prepended to the block, check that
// the info contained in these feed operators matches the feed_targets
// and feed_holder_name. Raise exception when any mismatch is found.
// Return true if the block has feed operators and holder of matching info.
static bool has_feed_operators(
    const BlockDesc& block,
    const std::map<std::string, const LoDTensor*>& feed_targets,
    const std::string& feed_holder_name) {
  size_t feed_count = 0;
  for (auto* op : block.AllOps()) {
    if (op->Type() == kFeedOpType) {
      feed_count++;
      // The feed_op's input variable should be named feed_holder_name.
      PADDLE_ENFORCE_EQ(op->Input("X")[0], feed_holder_name,
                        "Input to feed op should be '%s'", feed_holder_name);
      std::string feed_target_name = op->Output("Out")[0];
      PADDLE_ENFORCE(
          feed_targets.find(feed_target_name) != feed_targets.end(),
          "Feed operator output name '%s' cannot be found in 'feed_targets'",
          feed_target_name);
    }
  }

  if (feed_count > 0) {
    PADDLE_ENFORCE_EQ(
        feed_count, feed_targets.size(),
        "The number of feed operators should match 'feed_targets'");

    if (!feed_holder_name.empty()) {
      // When feed operators are present, so should the feed_holder be.
      auto var = block.FindVar(feed_holder_name);
      PADDLE_ENFORCE_NOT_NULL(var, "Block should already have a '%s' variable",
                              feed_holder_name);
      PADDLE_ENFORCE_EQ(var->GetType(), proto::VarType::FEED_MINIBATCH,
                        "'%s' variable should be 'FEED_MINIBATCH' type",
                        feed_holder_name);
    }
  }

  return feed_count > 0;
}

// Check whether the block already has fetch operators and fetch_holder.
// Return false if the block does not have any fetch operators.
// If some fetch operators have been appended to the block, check that
// the info contained in these fetch operators matches the fetch_targets
// and fetch_holder_name. Raise exception when any mismatch is found.
// Return true if the block has fetch operators and holder of matching info.
static bool has_fetch_operators(
    const BlockDesc& block,
    const std::map<std::string, LoDTensor*>& fetch_targets,
    const std::string& fetch_holder_name) {
  size_t fetch_count = 0;
  for (auto* op : block.AllOps()) {
    if (op->Type() == kFetchOpType) {
      fetch_count++;
      // The fetch_op's output variable should be named fetch_holder_name.
      PADDLE_ENFORCE_EQ(op->Output("Out")[0], fetch_holder_name,
                        "Output of fetch op should be '%s'", fetch_holder_name);
      std::string fetch_target_name = op->Input("X")[0];
      PADDLE_ENFORCE(
          fetch_targets.find(fetch_target_name) != fetch_targets.end(),
          "Fetch operator input name '%s' cannot be found in 'fetch_targets'",
          fetch_target_name);
    }
  }

  if (fetch_count > 0) {
    PADDLE_ENFORCE_EQ(
        fetch_count, fetch_targets.size(),
        "The number of fetch operators should match 'fetch_targets'");

    if (!fetch_holder_name.empty()) {
      // When fetch operators are present, so should the fetch_holder be.
      auto var = block.FindVar(fetch_holder_name);
      PADDLE_ENFORCE_NOT_NULL(var, "Block should already have a '%s' variable",
                              fetch_holder_name);
      PADDLE_ENFORCE_EQ(var->GetType(), proto::VarType::FETCH_LIST,
                        "'%s' variable should be 'FETCH_LIST' type",
                        fetch_holder_name);
    }
  }

  return fetch_count > 0;
}

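// Runs `program` with explicit feed/fetch targets; if block 0 lacks matching
// feed/fetch operators, they are added to a temporary copy of the program.
// A minimal sketch with hypothetical tensor names, assuming the default
// holder names declared in executor.h:
//
//   std::map<std::string, const LoDTensor*> feeds{{"x", &x_tensor}};
//   std::map<std::string, LoDTensor*> fetches{{"y", &y_tensor}};
//   exe.Run(program, &scope, &feeds, &fetches);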
void Executor::Run(const ProgramDesc& program, Scope* scope,
                   std::map<std::string, const LoDTensor*>* feed_targets,
                   std::map<std::string, LoDTensor*>* fetch_targets,
                   bool create_local_scope, bool create_vars,
                   const std::string& feed_holder_name,
                   const std::string& fetch_holder_name) {
  platform::RecordBlock b(kProgramId);
  if (FLAGS_use_mkldnn) EnableMKLDNN(program);
  bool has_feed_ops =
      has_feed_operators(program.Block(0), *feed_targets, feed_holder_name);
  bool has_fetch_ops =
      has_fetch_operators(program.Block(0), *fetch_targets, fetch_holder_name);

  ProgramDesc* copy_program = const_cast<ProgramDesc*>(&program);
  std::unique_ptr<ProgramDesc> unique_ptr_of_copy_program;
  if (!has_feed_ops || !has_fetch_ops) {
    unique_ptr_of_copy_program.reset(new ProgramDesc(program));
    copy_program = unique_ptr_of_copy_program.get();
  }
  auto* global_block = copy_program->MutableBlock(0);

  if (!has_feed_ops) {
    // create feed_holder variable
    auto* feed_holder = global_block->Var(feed_holder_name);
    feed_holder->SetType(proto::VarType::FEED_MINIBATCH);
    feed_holder->SetPersistable(true);

    int i = 0;
    for (auto& feed_target : (*feed_targets)) {
      std::string var_name = feed_target.first;
      VLOG(3) << "feed target's name: " << var_name;

      // prepend feed op
      auto* op = global_block->PrependOp();
      op->SetType(kFeedOpType);
      op->SetInput("X", {feed_holder_name});
      op->SetOutput("Out", {var_name});
      op->SetAttr("col", {static_cast<int>(i)});
      op->CheckAttrs();

      i++;
    }
  }

  if (!has_fetch_ops) {
    // create fetch_holder variable
    auto* fetch_holder = global_block->Var(fetch_holder_name);
    fetch_holder->SetType(proto::VarType::FETCH_LIST);
    fetch_holder->SetPersistable(true);

    int i = 0;
    for (auto& fetch_target : (*fetch_targets)) {
      std::string var_name = fetch_target.first;
      VLOG(3) << "fetch target's name: " << var_name;

      // append fetch op
      auto* op = global_block->AppendOp();
      op->SetType(kFetchOpType);
      op->SetInput("X", {var_name});
      op->SetOutput("Out", {fetch_holder_name});
      op->SetAttr("col", {static_cast<int>(i)});
      op->CheckAttrs();

      i++;
    }
  }

  auto ctx = Prepare(*copy_program, 0);
  RunPreparedContext(ctx.get(), scope, feed_targets, fetch_targets,
                     create_local_scope, create_vars, feed_holder_name,
                     fetch_holder_name);
}

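// Prepares block `block_id` of `program` for execution: instantiates every
// operator of the block and precomputes which variables become unused after
// each op so they can be garbage-collected eagerly (variables listed in
// `skip_ref_cnt_vars` are always kept alive).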
std::unique_ptr<ExecutorPrepareContext> Executor::Prepare(
    const ProgramDesc& program, int block_id,
    const std::vector<std::string>& skip_ref_cnt_vars, bool force_disable_gc) {
  std::unique_ptr<ExecutorPrepareContext> ctx(
      new ExecutorPrepareContext(program, block_id));
  PADDLE_ENFORCE_LT(static_cast<size_t>(block_id), program.Size());
  auto& block = program.Block(block_id);
  for (auto& op_desc : block.AllOps()) {
    ctx->ops_.push_back(OpRegistry::CreateOp(*op_desc));
  }
#ifdef PADDLE_WITH_NGRAPH
  if (FLAGS_use_ngraph && ctx->block_id_ == 0) {
    paddle::operators::NgraphEngine::FuseNgraphOps(
        ctx->prog_.Block(ctx->block_id_), &ctx->ops_);
  }
#endif
  ctx->PrepareUnusedVars(skip_ref_cnt_vars, force_disable_gc);
  return ctx;
}

std::vector<std::shared_ptr<ExecutorPrepareContext>> Executor::Prepare(
    const ProgramDesc& program, const std::vector<int>& block_ids,
    const std::vector<std::vector<std::string>>& skip_ref_cnt_vars,
    bool force_disable_gc) {
  PADDLE_ENFORCE(
      skip_ref_cnt_vars.empty() || skip_ref_cnt_vars.size() == block_ids.size(),
      "skip_ref_cnt_vars should be either empty or equals to block number %d",
      block_ids.size());
  std::vector<std::shared_ptr<ExecutorPrepareContext>> result;
  size_t idx = 0;
  for (auto& bid : block_ids) {
    PADDLE_ENFORCE_LT(static_cast<size_t>(bid), program.Size());
    auto* ctx = new ExecutorPrepareContext(program, bid);
    auto& block = program.Block(bid);
    for (auto& op_desc : block.AllOps()) {
      ctx->ops_.push_back(OpRegistry::CreateOp(*op_desc));
    }
    if (skip_ref_cnt_vars.empty()) {
      ctx->PrepareUnusedVars(std::vector<std::string>(), force_disable_gc);
    } else {
      ctx->PrepareUnusedVars(skip_ref_cnt_vars[idx], force_disable_gc);
    }
    result.push_back(std::shared_ptr<ExecutorPrepareContext>(ctx));
    ++idx;
  }
  return result;
}

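// Executes a prepared context op by op in `scope` (or in a fresh child
// scope when `create_local_scope` is true). If eager deletion is enabled
// and not force-disabled, a GarbageCollector matching `place_` frees each
// variable right after its last use.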
void Executor::RunPreparedContext(ExecutorPrepareContext* ctx, Scope* scope,
                                  bool create_local_scope, bool create_vars,
                                  bool keep_kids) {
  platform::RecordBlock b(kProgramId);
  PADDLE_ENFORCE_NOT_NULL(scope);
  Scope* local_scope = scope;
  if (create_vars) {
    if (create_local_scope) {
      local_scope = &scope->NewScope();
    }
    CreateVariables(ctx->prog_, local_scope, ctx->block_id_);
  }

  int64_t max_memory_size = GetEagerDeletionThreshold();
  std::unique_ptr<GarbageCollector> gc;
  if (!ctx->force_disable_gc_ && max_memory_size >= 0) {
#ifdef PADDLE_WITH_CUDA
    if (platform::is_gpu_place(place_)) {
      if (IsFastEagerDeletionModeEnabled()) {
        gc.reset(new UnsafeFastGPUGarbageCollector(
            boost::get<platform::CUDAPlace>(place_), max_memory_size));
      } else {
        gc.reset(new DefaultStreamGarbageCollector(
            boost::get<platform::CUDAPlace>(place_), max_memory_size));
      }
    } else if (platform::is_cpu_place(place_)) {
#endif
      gc.reset(new CPUGarbageCollector(boost::get<platform::CPUPlace>(place_),
                                       max_memory_size));
#ifdef PADDLE_WITH_CUDA
    }
#endif
  }

  for (auto& op : ctx->ops_) {
    op->Run(*local_scope, place_);
    if (gc) {
      DeleteUnusedTensors(*local_scope, op.get(), ctx->unused_vars_, gc.get());
    }
  }

  platform::DeviceContextPool::Instance().Get(place_)->Wait();

  if (local_scope != scope) {
    scope->DeleteScope(local_scope);
  } else {
    if (!keep_kids) {
      // By default, we should delete all kid scopes after run executor because
      // some operators may create local scope when running, such as while_op.
      // But when while_op also create a local executor to run it's sub block,
      // the sub scopes it created should not be dropped immediately, because
      // while_grad_op will use some variables created during while_op run, so
      // we need to keep the kids and wait for the outer executor to drop them.
      scope->DropKids();
    }
  }
}

void Executor::RunPreparedContext(
    ExecutorPrepareContext* ctx, Scope* scope,
    std::map<std::string, const LoDTensor*>* feed_targets,
    std::map<std::string, LoDTensor*>* fetch_targets, bool create_local_scope,
    bool create_vars, const std::string& feed_holder_name,
    const std::string& fetch_holder_name) {
  auto& global_block = ctx->prog_.Block(ctx->block_id_);

  PADDLE_ENFORCE(
      has_feed_operators(global_block, *feed_targets, feed_holder_name),
      "Program in ExecutorPrepareContext should has feed_ops.");
  PADDLE_ENFORCE(
      has_fetch_operators(global_block, *fetch_targets, fetch_holder_name),
      "Program in the prepared context should has fetch_ops.");

  // map the data of feed_targets to feed_holder
  for (auto* op : global_block.AllOps()) {
    if (op->Type() == kFeedOpType) {
      std::string feed_target_name = op->Output("Out")[0];
      int idx = boost::get<int>(op->GetAttr("col"));
      SetFeedVariable(scope, *(*feed_targets)[feed_target_name],
                      feed_holder_name, idx);
    }
  }

  RunPreparedContext(ctx, scope, create_local_scope, create_vars);

  // obtain the data of fetch_targets from fetch_holder
  for (auto* op : global_block.AllOps()) {
    if (op->Type() == kFetchOpType) {
      std::string fetch_target_name = op->Input("X")[0];
      int idx = boost::get<int>(op->GetAttr("col"));
      *(*fetch_targets)[fetch_target_name] =
          GetFetchVariable(*scope, fetch_holder_name, idx);
    }
  }
}

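// Turns on the `use_mkldnn` attribute for every operator in every block of
// `program` that declares such an attribute; when Paddle is built without
// MKLDNN support this only logs a warning.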
void Executor::EnableMKLDNN(const ProgramDesc& program) {
#ifdef PADDLE_WITH_MKLDNN
  VLOG(3) << "use_mkldnn=True";
  for (size_t bid = 0; bid < program.Size(); ++bid) {
    auto* block = const_cast<ProgramDesc&>(program).MutableBlock(bid);
    for (auto* op : block->AllOps()) {
      if (op->HasAttr("use_mkldnn")) {
        op->SetAttr("use_mkldnn", true);
      }
    }
  }
#else
  LOG(WARNING)
      << "'MKLDNN' is not supported. Please re-compile with the "
         "WITH_MKLDNN option";
#endif
}
}  // namespace framework
}  // namespace paddle