/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/framework/executor.h"
#include <deque>
#include <memory>
#include <unordered_map>
#include <unordered_set>
#include <utility>
#include "google/protobuf/io/zero_copy_stream_impl.h"
#include "google/protobuf/message.h"
#include "google/protobuf/text_format.h"
#include "paddle/fluid/framework/feed_fetch_method.h"
#include "paddle/fluid/framework/lod_rank_table.h"
#include "paddle/fluid/framework/lod_tensor_array.h"
#include "paddle/fluid/framework/op_registry.h"
#include "paddle/fluid/framework/reader.h"
#include "paddle/fluid/framework/trainer_desc.pb.h"
#include "paddle/fluid/framework/trainer_factory.h"
#include "paddle/fluid/framework/transfer_scope_cache.h"
#include "paddle/fluid/framework/variable_helper.h"
#include "paddle/fluid/operators/controlflow/conditional_block_op_helper.h"
#include "paddle/fluid/operators/controlflow/recurrent_op_helper.h"
#include "paddle/fluid/operators/controlflow/while_op_helper.h"
#include "paddle/fluid/operators/distributed/distributed.h"
#include "paddle/fluid/platform/place.h"
#include "paddle/fluid/platform/profiler.h"

#ifdef PADDLE_WITH_NGRAPH
#include "paddle/fluid/operators/ngraph/ngraph_engine.h"
#endif

DECLARE_bool(benchmark);
DEFINE_bool(use_mkldnn, false, "Use MKLDNN to run");
DEFINE_bool(use_ngraph, false, "Use NGRAPH to run");

namespace paddle {
namespace framework {
namespace {
// Block ids start from 0. kProgramId (-1) is used to represent the pseudo
// code block that wraps the whole program, i.e. block 0.
int kProgramId = -1;
}  // namespace

ExecutorPrepareContext::ExecutorPrepareContext(
    const framework::ProgramDesc& prog, size_t block_id)
    : prog_(prog), block_id_(block_id) {}

void ExecutorPrepareContext::PrepareUnusedVars(
    const std::vector<std::string>& keep_vars, bool force_disable_gc) {
#ifdef PADDLE_WITH_NGRAPH
  if (FLAGS_use_ngraph) {
    // FIXME(zjl): Results differ when ngraph and gc are both enabled in
    // unittests; I do not know why it happens. Maybe the ngraph engine
    // caches some variables?
    LOG_FIRST_N(WARNING, 1)
        << "FLAGS_use_ngraph=True, garbage collection strategy is "
           "disabled in Executor";
    force_disable_gc = true;
  }
#endif
  force_disable_gc_ = force_disable_gc;
  if (GetEagerDeletionThreshold() < 0 || force_disable_gc_) {
    return;
  }

  // If gc is enabled and the program has more than one block, prepare safe
  // eager deletion for the control-flow ops and their grad ops.
  if (prog_.Size() > 1) {
    operators::PrepareSafeEagerDeletionOnConditionalOpAndConditionalGradOp(
        prog_, block_id_, ops_);
    operators::PrepareSafeEagerDeletionOnWhileOpAndWhileGradOp(prog_, block_id_,
                                                               ops_);
    operators::PrepareSafeEagerDeletionOnRecurrentOpAndRecurrentGradOp(
        prog_, block_id_, ops_);
  }
  unused_vars_ = GetUnusedVars(prog_.Block(block_id_), ops_, keep_vars);
}

ExecutorPrepareContext::~ExecutorPrepareContext() {
  VLOG(5) << "destroy ExecutorPrepareContext";
}

Executor::Executor(const platform::Place& place) : place_(place) {}

void Executor::Close() {
#ifdef PADDLE_WITH_DISTRIBUTE
  // TODO(typhoonzero): the complete message will need to use the real
  // trainer_id instead of 0.
  auto client =
      paddle::operators::distributed::RPCClient::GetInstance<RPCCLIENT_T>(0);
  client->SendComplete();
#endif
}

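// Create the variables of the given block. When `scope` has ancestors,
// persistable variables are created in the outermost ancestor scope (so they
// survive across runs) and the rest in `scope` itself; otherwise everything
// is created directly in `scope`.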
void Executor::CreateVariables(const ProgramDesc& pdesc, Scope* scope,
                               int block_id) {
  auto& global_block = pdesc.Block(block_id);

  const Scope* ancestor_scope = scope;
  while (ancestor_scope->parent()) {
    ancestor_scope = ancestor_scope->parent();
  }

  if (ancestor_scope != scope) {
    for (auto& var : global_block.AllVars()) {
      if (var->Name() == framework::kEmptyVarName) {
        continue;
      }

      if (var->Persistable()) {
        auto* ptr = const_cast<Scope*>(ancestor_scope)->Var(var->Name());
        InitializeVariable(ptr, var->GetType());
        VLOG(3) << "Create Variable " << var->Name()
                << " global, which pointer is " << ptr;
      } else {
        auto* ptr = scope->Var(var->Name());
        InitializeVariable(ptr, var->GetType());
        VLOG(3) << "Create Variable " << var->Name()
                << " locally, which pointer is " << ptr;
      }
    }
  } else {
    for (auto& var : global_block.AllVars()) {
      auto* ptr = scope->Var(var->Name());
      InitializeVariable(ptr, var->GetType());
      VLOG(3) << "Create variable " << var->Name() << ", which pointer is "
              << ptr;
    }
  }
}

std::shared_ptr<TrainerBase> Executor::InitForDataset(
    const ProgramDesc& main_program, const std::string& trainer_desc_str,
    Scope* scope, Dataset* dataset) {
  VLOG(3) << "Start to RunFromDataset in executor";
  TrainerDesc trainer_desc;
  bool success = trainer_desc.ParseFromString(trainer_desc_str);
  PADDLE_ENFORCE_EQ(success, true,
                    "Failed to parse TrainerDesc from string:\n%s",
                    trainer_desc_str.c_str());
  VLOG(3) << "Going to create trainer, trainer class is "
          << trainer_desc.class_name();
  std::shared_ptr<TrainerBase> trainer;
  trainer = TrainerFactory::CreateTrainer(trainer_desc.class_name());
  // initialize trainer
  VLOG(3) << "Going to initialize trainer";
  trainer->Initialize(trainer_desc, dataset);
  VLOG(3) << "Set root scope here";
  trainer->SetScope(scope);
  // prepare training environment and helper environment
  VLOG(3) << "Try to init train environment";
  trainer->InitTrainerEnv(main_program, place_);
  VLOG(3) << "Try to init other environment";
  trainer->InitOtherEnv(main_program);
  return trainer;
}

void Executor::RunFromDataset(std::shared_ptr<TrainerBase> trainer) {
  PADDLE_ENFORCE_NE(trainer, nullptr,
                    "Trainer is nullptr, invoke InitForDataset first");
  // training and finalize training
  VLOG(3) << "Trainer starts to run";
  trainer->Run();
  VLOG(3) << "Trainer going to finalize";
  trainer->Finalize();
}
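
// A minimal usage sketch (illustrative only: `main_program`,
// `trainer_desc_str`, `root_scope` and `dataset` are assumed to be built by
// the caller):
//
//   Executor exe(platform::CPUPlace());
//   auto trainer = exe.InitForDataset(main_program, trainer_desc_str,
//                                     &root_scope, dataset);
//   exe.RunFromDataset(trainer);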

void Executor::Run(const ProgramDesc& pdesc, Scope* scope, int block_id,
                   bool create_local_scope, bool create_vars,
                   const std::vector<std::string>& skip_ref_cnt_vars,
                   bool force_disable_gc) {
  platform::RecordBlock b(block_id);
  if (FLAGS_use_mkldnn) EnableMKLDNN(pdesc);
  auto ctx = Prepare(pdesc, block_id, skip_ref_cnt_vars, force_disable_gc);
  RunPreparedContext(ctx.get(), scope, create_local_scope, create_vars);
}

// Check whether the block already has feed operators and feed_holder.
// Return false if the block does not have any feed operators.
// If some feed operators have been prepended to the block, check that
// the info contained in these feed operators matches the feed_targets
// and feed_holder_name. Raise exception when any mismatch is found.
// Return true if the block has feed operators and holder of matching info.
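// For example, with feed_targets {"x", "y"} and feed_holder_name "feed", a
// matching block is expected to contain (names are illustrative):
//   feed(X="feed", col=0) -> Out="x"
//   feed(X="feed", col=1) -> Out="y"
// where "feed" is a variable of type FEED_MINIBATCH.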
static bool has_feed_operators(
    const BlockDesc& block,
    const std::map<std::string, const LoDTensor*>& feed_targets,
    const std::string& feed_holder_name) {
  size_t feed_count = 0;
  for (auto* op : block.AllOps()) {
    if (op->Type() == kFeedOpType) {
      feed_count++;
      // The name of feed_op's input variable should be feed_holder_name.
      PADDLE_ENFORCE_EQ(op->Input("X")[0], feed_holder_name,
                        "Input to feed op should be '%s'", feed_holder_name);
      std::string feed_target_name = op->Output("Out")[0];
      PADDLE_ENFORCE(
          feed_targets.find(feed_target_name) != feed_targets.end(),
          "Feed operator output name '%s' cannot be found in 'feed_targets'",
          feed_target_name);
    }
  }

  if (feed_count > 0) {
    PADDLE_ENFORCE_EQ(
        feed_count, feed_targets.size(),
        "The number of feed operators should match 'feed_targets'");

    if (!feed_holder_name.empty()) {
      // When feed operators are present, so should be the feed_holder.
      auto var = block.FindVar(feed_holder_name);
      PADDLE_ENFORCE_NOT_NULL(var, "Block should already have a '%s' variable",
                              feed_holder_name);
      PADDLE_ENFORCE_EQ(var->GetType(), proto::VarType::FEED_MINIBATCH,
                        "'%s' variable should be 'FEED_MINIBATCH' type",
                        feed_holder_name);
    }
  }

  return feed_count > 0;
}

// Check whether the block already has fetch operators and fetch_holder.
// Return false if the block does not have any fetch operators.
// If some fetch operators have been appended to the block, check that
// the info contained in these fetch operators matches the fetch_targets
// and fetch_holder_name. Raise exception when any mismatch is found.
// Return true if the block has fetch operators and holder of matching info.
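// For example, with fetch_targets {"loss"} and fetch_holder_name "fetch", a
// matching block is expected to contain (names are illustrative):
//   fetch(X="loss", col=0) -> Out="fetch"
// where "fetch" is a variable of type FETCH_LIST.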
static bool has_fetch_operators(
    const BlockDesc& block,
    const std::map<std::string, LoDTensor*>& fetch_targets,
    const std::string& fetch_holder_name) {
  size_t fetch_count = 0;
  for (auto* op : block.AllOps()) {
    if (op->Type() == kFetchOpType) {
      fetch_count++;
      // The name of fetch_op's output variable should be fetch_holder_name.
      PADDLE_ENFORCE_EQ(op->Output("Out")[0], fetch_holder_name,
                        "Output of fetch op should be '%s'", fetch_holder_name);
      std::string fetch_target_name = op->Input("X")[0];
      PADDLE_ENFORCE(
          fetch_targets.find(fetch_target_name) != fetch_targets.end(),
          "Fetch operator input name '%s' cannot be found in 'fetch_targets'",
          fetch_target_name);
    }
  }

  if (fetch_count > 0) {
    PADDLE_ENFORCE_EQ(
        fetch_count, fetch_targets.size(),
        "The number of fetch operators should match 'fetch_targets'");

    if (!fetch_holder_name.empty()) {
      // When fetch operators are present, so should be the fetch_holder.
      auto var = block.FindVar(fetch_holder_name);
      PADDLE_ENFORCE_NOT_NULL(var, "Block should already have a '%s' variable",
                              fetch_holder_name);
      PADDLE_ENFORCE_EQ(var->GetType(), proto::VarType::FETCH_LIST,
                        "'%s' variable should be 'FETCH_LIST' type",
                        fetch_holder_name);
    }
  }

  return fetch_count > 0;
}

std::unique_ptr<ExecutorPrepareContext> Executor::PrepareCtxCache(
    const ProgramDesc& program, int block_id,
    const std::vector<std::string>& skip_ref_cnt_vars, bool force_disable_gc) {
  return Prepare(program, block_id, skip_ref_cnt_vars, force_disable_gc);
}

void Executor::Run(const ProgramDesc& program, Scope* scope,
                   std::map<std::string, const LoDTensor*>* feed_targets,
                   std::map<std::string, LoDTensor*>* fetch_targets,
                   bool create_local_scope, bool create_vars,
                   const std::string& feed_holder_name,
                   const std::string& fetch_holder_name) {
  platform::RecordBlock b(kProgramId);
  if (FLAGS_use_mkldnn) EnableMKLDNN(program);
  bool has_feed_ops =
      has_feed_operators(program.Block(0), *feed_targets, feed_holder_name);
  bool has_fetch_ops =
      has_fetch_operators(program.Block(0), *fetch_targets, fetch_holder_name);

  ProgramDesc* copy_program = const_cast<ProgramDesc*>(&program);
  std::unique_ptr<ProgramDesc> unique_ptr_of_copy_program;
  if (!has_feed_ops || !has_fetch_ops) {
    unique_ptr_of_copy_program.reset(new ProgramDesc(program));
    copy_program = unique_ptr_of_copy_program.get();
  }
  auto* global_block = copy_program->MutableBlock(0);

  if (!has_feed_ops) {
    // create feed_holder variable
    auto* feed_holder = global_block->Var(feed_holder_name);
    feed_holder->SetType(proto::VarType::FEED_MINIBATCH);
    feed_holder->SetPersistable(true);

    int i = 0;
    for (auto& feed_target : (*feed_targets)) {
      std::string var_name = feed_target.first;
      VLOG(3) << "feed target's name: " << var_name;

      // prepend feed op
      auto* op = global_block->PrependOp();
      op->SetType(kFeedOpType);
      op->SetInput("X", {feed_holder_name});
      op->SetOutput("Out", {var_name});
      op->SetAttr("col", {static_cast<int>(i)});
      op->CheckAttrs();

      i++;
    }
  }

  if (!has_fetch_ops) {
    // create fetch_holder variable
    auto* fetch_holder = global_block->Var(fetch_holder_name);
    fetch_holder->SetType(proto::VarType::FETCH_LIST);
    fetch_holder->SetPersistable(true);

    int i = 0;
    for (auto& fetch_target : (*fetch_targets)) {
      std::string var_name = fetch_target.first;
      VLOG(3) << "fetch target's name: " << var_name;

      // append fetch op
      auto* op = global_block->AppendOp();
      op->SetType(kFetchOpType);
      op->SetInput("X", {var_name});
      op->SetOutput("Out", {fetch_holder_name});
      op->SetAttr("col", {static_cast<int>(i)});
      op->CheckAttrs();

      i++;
    }
  }

  auto ctx = Prepare(*copy_program, 0);
  RunPreparedContext(ctx.get(), scope, feed_targets, fetch_targets,
                     create_local_scope, create_vars, feed_holder_name,
                     fetch_holder_name);
}
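
// A minimal usage sketch (illustrative only: `program`, `scope` and the
// tensors are assumed to be set up by the caller, and the holder names fall
// back to the defaults declared in executor.h):
//
//   std::map<std::string, const LoDTensor*> feed_targets{{"x", &x_tensor}};
//   std::map<std::string, LoDTensor*> fetch_targets{{"loss", &loss_tensor}};
//   Executor exe(platform::CPUPlace());
//   exe.Run(program, &scope, &feed_targets, &fetch_targets,
//           /*create_local_scope=*/true, /*create_vars=*/true);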

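// Build an ExecutorPrepareContext for a single block: instantiate all of the
// block's operators, optionally fuse them for ngraph, and precompute which
// variables can be deleted eagerly (anything in skip_ref_cnt_vars is kept).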
std::unique_ptr<ExecutorPrepareContext> Executor::Prepare(
    const ProgramDesc& program, int block_id,
    const std::vector<std::string>& skip_ref_cnt_vars, bool force_disable_gc) {
  std::unique_ptr<ExecutorPrepareContext> ctx(
      new ExecutorPrepareContext(program, block_id));
  PADDLE_ENFORCE_LT(static_cast<size_t>(block_id), program.Size());
  auto& block = program.Block(block_id);
  for (auto& op_desc : block.AllOps()) {
    ctx->ops_.push_back(OpRegistry::CreateOp(*op_desc));
  }
#ifdef PADDLE_WITH_NGRAPH
  if (FLAGS_use_ngraph && ctx->block_id_ == 0) {
    paddle::operators::NgraphEngine::FuseNgraphOps(
        ctx->prog_.Block(ctx->block_id_), &ctx->ops_);
  }
#endif
  ctx->PrepareUnusedVars(skip_ref_cnt_vars, force_disable_gc);
  return ctx;
}

std::vector<std::shared_ptr<ExecutorPrepareContext>> Executor::Prepare(
    const ProgramDesc& program, const std::vector<int>& block_ids,
    const std::vector<std::vector<std::string>>& skip_ref_cnt_vars,
    bool force_disable_gc) {
  PADDLE_ENFORCE(
      skip_ref_cnt_vars.empty() || skip_ref_cnt_vars.size() == block_ids.size(),
      "skip_ref_cnt_vars should be either empty or equal to the number of "
      "blocks %d",
      block_ids.size());
  std::vector<std::shared_ptr<ExecutorPrepareContext>> result;
  size_t idx = 0;
  for (auto& bid : block_ids) {
    PADDLE_ENFORCE_LT(static_cast<size_t>(bid), program.Size());
    auto* ctx = new ExecutorPrepareContext(program, bid);
    auto& block = program.Block(bid);
    for (auto& op_desc : block.AllOps()) {
      ctx->ops_.push_back(OpRegistry::CreateOp(*op_desc));
    }
    if (skip_ref_cnt_vars.empty()) {
      ctx->PrepareUnusedVars(std::vector<std::string>(), force_disable_gc);
    } else {
      ctx->PrepareUnusedVars(skip_ref_cnt_vars[idx], force_disable_gc);
    }
    result.push_back(std::shared_ptr<ExecutorPrepareContext>(ctx));
    ++idx;
  }
  return result;
}

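// Run a prepared context. When `create_vars` is set, the block's variables
// are created first, in a fresh child scope if `create_local_scope` is also
// set. `keep_kids` controls whether kid scopes created by ops such as
// while_op are dropped on exit; see the comment near scope->DropKids() below.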
void Executor::RunPreparedContext(ExecutorPrepareContext* ctx, Scope* scope,
                                  bool create_local_scope, bool create_vars,
                                  bool keep_kids) {
  platform::RecordBlock b(kProgramId);
  PADDLE_ENFORCE_NOT_NULL(scope);
  Scope* local_scope = scope;
  if (create_vars) {
    if (create_local_scope) {
      local_scope = &scope->NewScope();
    }
    CreateVariables(ctx->prog_, local_scope, ctx->block_id_);
  }

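  // Pick a garbage collector for eager deletion of unused variables, unless
  // gc was force-disabled when this context was prepared. On CUDA places
  // either the unsafe fast collector or the default-stream collector is used,
  // depending on IsFastEagerDeletionModeEnabled(); CPU places get a
  // CPUGarbageCollector.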
  int64_t max_memory_size = GetEagerDeletionThreshold();
  std::unique_ptr<GarbageCollector> gc;
  if (!ctx->force_disable_gc_ && max_memory_size >= 0) {
#ifdef PADDLE_WITH_CUDA
    if (platform::is_gpu_place(place_)) {
      if (IsFastEagerDeletionModeEnabled()) {
        gc.reset(new UnsafeFastGPUGarbageCollector(
            boost::get<platform::CUDAPlace>(place_), max_memory_size));
      } else {
        gc.reset(new DefaultStreamGarbageCollector(
            boost::get<platform::CUDAPlace>(place_), max_memory_size));
      }
    } else if (platform::is_cpu_place(place_)) {
#endif
      gc.reset(new CPUGarbageCollector(boost::get<platform::CPUPlace>(place_),
                                       max_memory_size));
#ifdef PADDLE_WITH_CUDA
    }
#endif
  }

  for (auto& op : ctx->ops_) {
    op->Run(*local_scope, place_);
    if (gc) {
      DeleteUnusedTensors(*local_scope, op.get(), ctx->unused_vars_, gc.get());
    }
  }

  platform::DeviceContextPool::Instance().Get(place_)->Wait();

  if (local_scope != scope) {
    scope->DeleteScope(local_scope);
  } else {
    if (!keep_kids) {
      // By default, we should delete all kid scopes after the executor runs,
      // because some operators may create local scopes while running, such as
      // while_op. But when while_op also creates a local executor to run its
      // sub-block, the sub-scopes it created should not be dropped
      // immediately, because while_grad_op will use some variables created
      // during the while_op run, so we need to keep the kids and wait for the
      // outer executor to drop them.
      scope->DropKids();
    }
  }
}

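// Run a prepared context whose program already contains matching feed/fetch
// ops: copy feed_targets into the feed_holder variable, run the block, then
// copy the results from the fetch_holder variable back into fetch_targets.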
void Executor::RunPreparedContext(
    ExecutorPrepareContext* ctx, Scope* scope,
    std::map<std::string, const LoDTensor*>* feed_targets,
    std::map<std::string, LoDTensor*>* fetch_targets, bool create_local_scope,
    bool create_vars, const std::string& feed_holder_name,
    const std::string& fetch_holder_name) {
  auto& global_block = ctx->prog_.Block(ctx->block_id_);

  PADDLE_ENFORCE(
      has_feed_operators(global_block, *feed_targets, feed_holder_name),
      "Program in ExecutorPrepareContext should have feed_ops.");
  PADDLE_ENFORCE(
      has_fetch_operators(global_block, *fetch_targets, fetch_holder_name),
      "Program in the prepared context should have fetch_ops.");

  // map the data of feed_targets to feed_holder
  for (auto* op : global_block.AllOps()) {
    if (op->Type() == kFeedOpType) {
      std::string feed_target_name = op->Output("Out")[0];
      int idx = boost::get<int>(op->GetAttr("col"));
      SetFeedVariable(scope, *(*feed_targets)[feed_target_name],
                      feed_holder_name, idx);
    }
  }

  RunPreparedContext(ctx, scope, create_local_scope, create_vars);

  // obtain the data of fetch_targets from fetch_holder
  for (auto* op : global_block.AllOps()) {
    if (op->Type() == kFetchOpType) {
      std::string fetch_target_name = op->Input("X")[0];
      int idx = boost::get<int>(op->GetAttr("col"));
      *(*fetch_targets)[fetch_target_name] =
          GetFetchVariable(*scope, fetch_holder_name, idx);
    }
  }
}

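// Turn on the `use_mkldnn` attribute of every op that declares it, in every
// block of `program`. When Paddle was compiled without WITH_MKLDNN this only
// logs a warning.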
void Executor::EnableMKLDNN(const ProgramDesc& program) {
#ifdef PADDLE_WITH_MKLDNN
  VLOG(3) << "use_mkldnn=True";
  for (size_t bid = 0; bid < program.Size(); ++bid) {
    auto* block = const_cast<ProgramDesc&>(program).MutableBlock(bid);
    for (auto* op : block->AllOps()) {
      if (op->HasAttr("use_mkldnn")) {
        op->SetAttr("use_mkldnn", true);
      }
    }
  }
#else
  LOG(WARNING)
      << "'MKLDNN' is not supported. Please re-compile with the WITH_MKLDNN "
         "option";
#endif
}
}  // namespace framework
}  // namespace paddle