/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/framework/executor.h"
#include <deque>
#include <memory>
#include <unordered_map>
#include <unordered_set>
#include <utility>
#include "google/protobuf/io/zero_copy_stream_impl.h"
#include "google/protobuf/message.h"
#include "google/protobuf/text_format.h"
#include "paddle/fluid/framework/feed_fetch_method.h"
#include "paddle/fluid/framework/lod_rank_table.h"
#include "paddle/fluid/framework/lod_tensor_array.h"
#include "paddle/fluid/framework/op_registry.h"
#include "paddle/fluid/framework/reader.h"
#include "paddle/fluid/framework/trainer_desc.pb.h"
#include "paddle/fluid/framework/trainer_factory.h"
#include "paddle/fluid/framework/transfer_scope_cache.h"
#include "paddle/fluid/framework/variable_helper.h"
#include "paddle/fluid/operators/controlflow/conditional_block_op_helper.h"
#include "paddle/fluid/operators/controlflow/recurrent_op_helper.h"
#include "paddle/fluid/operators/controlflow/while_op_helper.h"
#include "paddle/fluid/operators/distributed/distributed.h"
#include "paddle/fluid/platform/place.h"
#include "paddle/fluid/platform/profiler.h"

#ifdef PADDLE_WITH_NGRAPH
#include "paddle/fluid/operators/ngraph/ngraph_engine.h"
DEFINE_bool(use_ngraph, false, "Use NGRAPH to run");
#endif

DECLARE_bool(benchmark);
DEFINE_bool(use_mkldnn, false, "Use MKLDNN to run");

namespace paddle {
namespace framework {
namespace {
// Block ids start from 0. kProgramId (-1) denotes the pseudo code block
// that wraps the whole program, whose first block is block 0.
int kProgramId = -1;
}  // namespace

ExecutorPrepareContext::ExecutorPrepareContext(
    const framework::ProgramDesc& prog, size_t block_id)
    : prog_(prog), block_id_(block_id) {}

void ExecutorPrepareContext::PrepareUnusedVars(
    const std::vector<std::string>& keep_vars, bool force_disable_gc) {
#ifdef PADDLE_WITH_NGRAPH
  if (FLAGS_use_ngraph) {
    // FIXME(zjl): There is a difference in unittest results when ngraph
    // and gc are both enabled. I do not know why it happens; maybe the
    // ngraph engine caches some variables?
    LOG_FIRST_N(WARNING, 1)
        << "FLAGS_use_ngraph=True, garbage collection strategy is "
           "disabled in Executor";
    force_disable_gc = true;
  }
#endif
  force_disable_gc_ = force_disable_gc;
  if (GetEagerDeletionThreshold() < 0 || force_disable_gc_) {
    return;
  }

  // If gc is enabled and the program has more than one block, make eager
  // deletion safe for the control-flow ops below.
  if (prog_.Size() > 1) {
    operators::PrepareSafeEagerDeletionOnConditionalOpAndConditionalGradOp(
        block_id_, ops_);
    operators::PrepareSafeEagerDeletionOnWhileOpAndWhileGradOp(block_id_, ops_);
    operators::PrepareSafeEagerDeletionOnRecurrentOpAndRecurrentGradOp(
        block_id_, ops_);
  }
  unused_vars_ = GetUnusedVars(prog_.Block(block_id_), ops_, keep_vars);
}

ExecutorPrepareContext::~ExecutorPrepareContext() {
  VLOG(5) << "destroy ExecutorPrepareContext";
}

Executor::Executor(const platform::Place& place) : place_(place) {}

void Executor::Close() {
#ifdef PADDLE_WITH_DISTRIBUTE
  // TODO(typhoonzero): the complete message will need to use the real
  // trainer_id instead of 0.
  auto client =
      paddle::operators::distributed::RPCClient::GetInstance<RPCCLIENT_T>(0);
  client->SendComplete();
#endif
}

void Executor::CreateVariables(const ProgramDesc& pdesc, Scope* scope,
                               int block_id) {
  auto& global_block = pdesc.Block(block_id);

  const Scope* ancestor_scope = scope;
  while (ancestor_scope->parent()) {
    ancestor_scope = ancestor_scope->parent();
  }

  if (ancestor_scope != scope) {
    for (auto& var : global_block.AllVars()) {
      if (var->Name() == framework::kEmptyVarName) {
        continue;
      }

      if (var->Persistable()) {
        auto* ptr = const_cast<Scope*>(ancestor_scope)->Var(var->Name());
        InitializeVariable(ptr, var->GetType());
        VLOG(3) << "Create Variable " << var->Name()
                << " global, which pointer is " << ptr;
      } else {
        auto* ptr = scope->Var(var->Name());
        InitializeVariable(ptr, var->GetType());
        VLOG(3) << "Create Variable " << var->Name()
                << " locally, which pointer is " << ptr;
      }
    }
  } else {
    for (auto& var : global_block.AllVars()) {
      auto* ptr = scope->Var(var->Name());
      InitializeVariable(ptr, var->GetType());
      VLOG(3) << "Create variable " << var->Name() << ", which pointer is "
              << ptr;
    }
  }
}
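
// A minimal sketch of the scope layout CreateVariables produces when handed a
// child scope (hypothetical variable names: "param" is persistable, "tmp" is
// not). Persistable variables are created in the root (ancestor) scope so
// they survive across runs; the rest live in the child scope:
//
//   Scope root;
//   Scope& child = root.NewScope();
//   executor.CreateVariables(pdesc, &child, /*block_id=*/0);
//   // root now holds "param"; child holds "tmp".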

void Executor::RunFromDataset(const ProgramDesc& main_program, Scope* scope,
                              Dataset* dataset,
                              const std::string& trainer_desc_str) {
  VLOG(3) << "Start to RunFromDataset in executor";
  TrainerDesc trainer_desc;
  bool success = trainer_desc.ParseFromString(trainer_desc_str);
  PADDLE_ENFORCE(success, "Failed to parse TrainerDesc from string:\n%s",
                 trainer_desc_str.c_str());
  VLOG(3) << "Going to create trainer, trainer class is "
          << trainer_desc.class_name();
  std::shared_ptr<TrainerBase> trainer;
  trainer = TrainerFactory::CreateTrainer(trainer_desc.class_name());
  // initialize trainer
  VLOG(3) << "Going to initialize trainer";
  trainer->Initialize(trainer_desc, dataset);
  VLOG(3) << "Set root scope here";
  trainer->SetScope(scope);
  // prepare training environment and helper environment
  VLOG(3) << "Try to init train environment";
  trainer->InitTrainerEnv(main_program, place_);
  VLOG(3) << "Try to init other environment";
  trainer->InitOtherEnv(main_program);
  // training and finalize training
  VLOG(3) << "Trainer starts to run";
  trainer->Run();
  VLOG(3) << "Trainer going to finalize";
  trainer->Finalize();
}
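
// A hedged usage sketch for RunFromDataset (assuming a TrainerDesc proto
// `desc` configured by the caller; the string argument is its binary
// serialization, matching the ParseFromString call above):
//
//   std::string trainer_desc_str;
//   desc.SerializeToString(&trainer_desc_str);
//   executor.RunFromDataset(main_program, &scope, dataset, trainer_desc_str);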

void Executor::Run(const ProgramDesc& pdesc, Scope* scope, int block_id,
                   bool create_local_scope, bool create_vars,
                   const std::vector<std::string>& skip_ref_cnt_vars,
                   bool force_disable_gc) {
  platform::RecordBlock b(block_id);
  if (FLAGS_use_mkldnn) EnableMKLDNN(pdesc);
  auto ctx = Prepare(pdesc, block_id, skip_ref_cnt_vars, force_disable_gc);
  RunPreparedContext(ctx.get(), scope, create_local_scope, create_vars);
}
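
// A minimal usage sketch for the overload above (hypothetical caller code,
// assuming an already-built ProgramDesc `program` and default gc settings):
//
//   paddle::framework::Scope scope;
//   paddle::framework::Executor exe(paddle::platform::CPUPlace());
//   exe.Run(program, &scope, /*block_id=*/0,
//           /*create_local_scope=*/true, /*create_vars=*/true);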

// Check whether the block already has feed operators and feed_holder.
// Return false if the block does not have any feed operators.
// If some feed operators have been prepended to the block, check that
// the info contained in these feed operators matches the feed_targets
// and feed_holder_name. Raise exception when any mismatch is found.
// Return true if the block has feed operators and holder of matching info.
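// For example, with feed_targets = {"x", "y"} and feed_holder_name = "feed",
// a block that passes this check starts with ops of the form (a sketch; the
// attribute syntax is illustrative only):
//   feed(X="feed", Out="x", col=0)
//   feed(X="feed", Out="y", col=1)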
static bool has_feed_operators(
    const BlockDesc& block,
    const std::map<std::string, const LoDTensor*>& feed_targets,
    const std::string& feed_holder_name) {
  size_t feed_count = 0;
  for (auto* op : block.AllOps()) {
    if (op->Type() == kFeedOpType) {
      feed_count++;
      // The name of feed_op's input variable should be feed_holder_name.
      PADDLE_ENFORCE_EQ(op->Input("X")[0], feed_holder_name,
                        "Input to feed op should be '%s'", feed_holder_name);
      std::string feed_target_name = op->Output("Out")[0];
      PADDLE_ENFORCE(
          feed_targets.find(feed_target_name) != feed_targets.end(),
          "Feed operator output name '%s' cannot be found in 'feed_targets'",
          feed_target_name);
    }
  }

  if (feed_count > 0) {
    PADDLE_ENFORCE_EQ(
        feed_count, feed_targets.size(),
        "The number of feed operators should match 'feed_targets'");

    if (!feed_holder_name.empty()) {
      // When feed operators are present, so should be the feed_holder.
      auto var = block.FindVar(feed_holder_name);
      PADDLE_ENFORCE_NOT_NULL(var, "Block should already have a '%s' variable",
                              feed_holder_name);
      PADDLE_ENFORCE_EQ(var->GetType(), proto::VarType::FEED_MINIBATCH,
                        "'%s' variable should be 'FEED_MINIBATCH' type",
                        feed_holder_name);
    }
  }

  return feed_count > 0;
}

// Check whether the block already has fetch operators and fetch_holder.
// Return false if the block does not have any fetch operators.
// If some fetch operators have been appended to the block, check that
// the info contained in these fetch operators matches the fetch_targets
// and fetch_holder_name. Raise exception when any mismatch is found.
// Return true if the block has fetch operators and holder of matching info.
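// For example, with fetch_targets = {"loss"} and fetch_holder_name = "fetch",
// a block that passes this check ends with an op of the form (a sketch; the
// attribute syntax is illustrative only):
//   fetch(X="loss", Out="fetch", col=0)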
static bool has_fetch_operators(
    const BlockDesc& block,
    const std::map<std::string, LoDTensor*>& fetch_targets,
    const std::string& fetch_holder_name) {
  size_t fetch_count = 0;
  for (auto* op : block.AllOps()) {
    if (op->Type() == kFetchOpType) {
      fetch_count++;
      // The name of fetch_op's output variable should be fetch_holder_name.
      PADDLE_ENFORCE_EQ(op->Output("Out")[0], fetch_holder_name,
                        "Output of fetch op should be '%s'", fetch_holder_name);
      std::string fetch_target_name = op->Input("X")[0];
      PADDLE_ENFORCE(
          fetch_targets.find(fetch_target_name) != fetch_targets.end(),
          "Fetch operator input name '%s' cannot be found in 'fetch_targets'",
          fetch_target_name);
    }
  }

  if (fetch_count > 0) {
    PADDLE_ENFORCE_EQ(
        fetch_count, fetch_targets.size(),
        "The number of fetch operators should match 'fetch_targets'");

    if (!fetch_holder_name.empty()) {
      // When fetch operators are present, so should be the fetch_holder.
      auto var = block.FindVar(fetch_holder_name);
      PADDLE_ENFORCE_NOT_NULL(var, "Block should already have a '%s' variable",
                              fetch_holder_name);
      PADDLE_ENFORCE_EQ(var->GetType(), proto::VarType::FETCH_LIST,
                        "'%s' variable should be 'FETCH_LIST' type",
                        fetch_holder_name);
    }
  }

  return fetch_count > 0;
}

std::unique_ptr<ExecutorPrepareContext> Executor::PrepareCtxCache(
    const ProgramDesc& program, int block_id,
    const std::vector<std::string>& skip_ref_cnt_vars, bool force_disable_gc) {
  return Prepare(program, block_id, skip_ref_cnt_vars, force_disable_gc);
}

void Executor::Run(const ProgramDesc& program, Scope* scope,
                   std::map<std::string, const LoDTensor*>* feed_targets,
                   std::map<std::string, LoDTensor*>* fetch_targets,
                   bool create_local_scope, bool create_vars,
                   const std::string& feed_holder_name,
                   const std::string& fetch_holder_name) {
  platform::RecordBlock b(kProgramId);
  if (FLAGS_use_mkldnn) EnableMKLDNN(program);
  bool has_feed_ops =
      has_feed_operators(program.Block(0), *feed_targets, feed_holder_name);
  bool has_fetch_ops =
      has_fetch_operators(program.Block(0), *fetch_targets, fetch_holder_name);

  ProgramDesc* copy_program = const_cast<ProgramDesc*>(&program);
  std::unique_ptr<ProgramDesc> unique_ptr_of_copy_program;
  if (!has_feed_ops || !has_fetch_ops) {
    unique_ptr_of_copy_program.reset(new ProgramDesc(program));
    copy_program = unique_ptr_of_copy_program.get();
  }
  auto* global_block = copy_program->MutableBlock(0);

  if (!has_feed_ops) {
    // create feed_holder variable
    auto* feed_holder = global_block->Var(feed_holder_name);
    feed_holder->SetType(proto::VarType::FEED_MINIBATCH);
    feed_holder->SetPersistable(true);

    int i = 0;
    for (auto& feed_target : (*feed_targets)) {
      std::string var_name = feed_target.first;
      VLOG(3) << "feed target's name: " << var_name;

      // prepend feed op
      auto* op = global_block->PrependOp();
      op->SetType(kFeedOpType);
      op->SetInput("X", {feed_holder_name});
      op->SetOutput("Out", {var_name});
      op->SetAttr("col", {static_cast<int>(i)});
      op->CheckAttrs();

      i++;
    }
  }

  if (!has_fetch_ops) {
    // create fetch_holder variable
    auto* fetch_holder = global_block->Var(fetch_holder_name);
    fetch_holder->SetType(proto::VarType::FETCH_LIST);
    fetch_holder->SetPersistable(true);

    int i = 0;
    for (auto& fetch_target : (*fetch_targets)) {
      std::string var_name = fetch_target.first;
      VLOG(3) << "fetch target's name: " << var_name;

      // append fetch op
      auto* op = global_block->AppendOp();
      op->SetType(kFetchOpType);
      op->SetInput("X", {var_name});
      op->SetOutput("Out", {fetch_holder_name});
      op->SetAttr("col", {static_cast<int>(i)});
      op->CheckAttrs();

      i++;
    }
  }

  auto ctx = Prepare(*copy_program, 0);
  RunPreparedContext(ctx.get(), scope, feed_targets, fetch_targets,
                     create_local_scope, create_vars, feed_holder_name,
                     fetch_holder_name);
}
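
// A hedged usage sketch for the overload above (hypothetical tensors; when
// the program lacks feed/fetch ops, they are added to a copy automatically):
//
//   std::map<std::string, const LoDTensor*> feeds{{"x", &x_tensor}};
//   std::map<std::string, LoDTensor*> fetches{{"loss", &loss_tensor}};
//   exe.Run(program, &scope, &feeds, &fetches,
//           /*create_local_scope=*/true, /*create_vars=*/true,
//           /*feed_holder_name=*/"feed", /*fetch_holder_name=*/"fetch");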

std::unique_ptr<ExecutorPrepareContext> Executor::Prepare(
    const ProgramDesc& program, int block_id,
    const std::vector<std::string>& skip_ref_cnt_vars, bool force_disable_gc) {
  std::unique_ptr<ExecutorPrepareContext> ctx(
      new ExecutorPrepareContext(program, block_id));
  PADDLE_ENFORCE_LT(static_cast<size_t>(block_id), program.Size());
  auto& block = program.Block(block_id);
  for (auto& op_desc : block.AllOps()) {
    ctx->ops_.push_back(OpRegistry::CreateOp(*op_desc));
  }
#ifdef PADDLE_WITH_NGRAPH
  if (FLAGS_use_ngraph && ctx->block_id_ == 0) {
    paddle::operators::NgraphEngine::FuseNgraphOps(
        ctx->prog_.Block(ctx->block_id_), &ctx->ops_);
  }
#endif
  ctx->PrepareUnusedVars(skip_ref_cnt_vars, force_disable_gc);
  return ctx;
}
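
// A hedged sketch of the prepare-once / run-many pattern this enables
// (hypothetical caller code; `exe`, `program`, and `scope` as above):
//
//   auto ctx = exe.Prepare(program, /*block_id=*/0);
//   for (int step = 0; step < num_steps; ++step) {
//     exe.RunPreparedContext(ctx.get(), &scope, /*create_local_scope=*/true,
//                            /*create_vars=*/true, /*keep_kids=*/false);
//   }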

std::vector<std::shared_ptr<ExecutorPrepareContext>> Executor::Prepare(
    const ProgramDesc& program, const std::vector<int>& block_ids,
    const std::vector<std::vector<std::string>>& skip_ref_cnt_vars,
    bool force_disable_gc) {
  PADDLE_ENFORCE(
      skip_ref_cnt_vars.empty() || skip_ref_cnt_vars.size() == block_ids.size(),
      "skip_ref_cnt_vars should be either empty or equals to block number %d",
      block_ids.size());
  std::vector<std::shared_ptr<ExecutorPrepareContext>> result;
  size_t idx = 0;
  for (auto& bid : block_ids) {
    PADDLE_ENFORCE_LT(static_cast<size_t>(bid), program.Size());
    auto* ctx = new ExecutorPrepareContext(program, bid);
    auto& block = program.Block(bid);
    for (auto& op_desc : block.AllOps()) {
      ctx->ops_.push_back(OpRegistry::CreateOp(*op_desc));
    }
    if (skip_ref_cnt_vars.empty()) {
      ctx->PrepareUnusedVars(std::vector<std::string>(), force_disable_gc);
    } else {
      ctx->PrepareUnusedVars(skip_ref_cnt_vars[idx], force_disable_gc);
    }
    result.push_back(std::shared_ptr<ExecutorPrepareContext>(ctx));
    ++idx;
  }
  return result;
}

void Executor::RunPreparedContext(ExecutorPrepareContext* ctx, Scope* scope,
                                  bool create_local_scope, bool create_vars,
                                  bool keep_kids) {
  platform::RecordBlock b(kProgramId);
  PADDLE_ENFORCE_NOT_NULL(scope);
  Scope* local_scope = scope;
  if (create_vars) {
    if (create_local_scope) {
      local_scope = &scope->NewScope();
    }
    CreateVariables(ctx->prog_, local_scope, ctx->block_id_);
  }

  int64_t max_memory_size = GetEagerDeletionThreshold();
  std::unique_ptr<GarbageCollector> gc;
  // FIXME(zjl): recurrent_op is rather complex, so we forcibly disable
  // gc in recurrent_op.
  if (!ctx->force_disable_gc_ && max_memory_size >= 0) {
#ifdef PADDLE_WITH_CUDA
    if (platform::is_gpu_place(place_)) {
      if (IsFastEagerDeletionModeEnabled()) {
        gc.reset(new UnsafeFastGPUGarbageCollector(
            boost::get<platform::CUDAPlace>(place_), max_memory_size));
      } else {
        gc.reset(new DefaultStreamGarbageCollector(
            boost::get<platform::CUDAPlace>(place_), max_memory_size));
      }
    } else if (platform::is_cpu_place(place_)) {
#endif
      gc.reset(new CPUGarbageCollector(boost::get<platform::CPUPlace>(place_),
                                       max_memory_size));
#ifdef PADDLE_WITH_CUDA
    }
#endif
  }

  for (auto& op : ctx->ops_) {
    op->Run(*local_scope, place_);
    if (gc) {
      DeleteUnusedTensors(*local_scope, op.get(), ctx->unused_vars_, gc.get());
    }
  }

  platform::DeviceContextPool::Instance().Get(place_)->Wait();

  if (local_scope != scope) {
    scope->DeleteScope(local_scope);
  } else {
    if (!keep_kids) {
      // By default, we delete all kid scopes after the executor runs, because
      // some operators, such as while_op, may create local scopes while
      // running. But when while_op itself creates a local executor to run its
      // sub-block, the sub-scopes it creates should not be dropped
      // immediately, because while_grad_op will use some variables created
      // during the while_op run, so we need to keep the kids and wait for the
      // outer executor to drop them.
      scope->DropKids();
    }
  }
}

void Executor::RunPreparedContext(
    ExecutorPrepareContext* ctx, Scope* scope,
    std::map<std::string, const LoDTensor*>* feed_targets,
    std::map<std::string, LoDTensor*>* fetch_targets, bool create_local_scope,
    bool create_vars, const std::string& feed_holder_name,
    const std::string& fetch_holder_name) {
  auto& global_block = ctx->prog_.Block(ctx->block_id_);

  PADDLE_ENFORCE(
      has_feed_operators(global_block, *feed_targets, feed_holder_name),
      "Program in ExecutorPrepareContext should have feed_ops.");
  PADDLE_ENFORCE(
      has_fetch_operators(global_block, *fetch_targets, fetch_holder_name),
      "Program in the prepared context should have fetch_ops.");

  // map the data of feed_targets to feed_holder
  for (auto* op : global_block.AllOps()) {
    if (op->Type() == kFeedOpType) {
      std::string feed_target_name = op->Output("Out")[0];
      int idx = boost::get<int>(op->GetAttr("col"));
      SetFeedVariable(scope, *(*feed_targets)[feed_target_name],
                      feed_holder_name, idx);
    }
  }

  RunPreparedContext(ctx, scope, create_local_scope, create_vars);

  // obtain the data of fetch_targets from fetch_holder
  for (auto* op : global_block.AllOps()) {
    if (op->Type() == kFetchOpType) {
      std::string fetch_target_name = op->Input("X")[0];
      int idx = boost::get<int>(op->GetAttr("col"));
      *(*fetch_targets)[fetch_target_name] =
          GetFetchVariable(*scope, fetch_holder_name, idx);
    }
  }
}

void Executor::EnableMKLDNN(const ProgramDesc& program) {
#ifdef PADDLE_WITH_MKLDNN
  VLOG(3) << "use_mkldnn=True";
  for (size_t bid = 0; bid < program.Size(); ++bid) {
    auto* block = const_cast<ProgramDesc&>(program).MutableBlock(bid);
    for (auto* op : block->AllOps()) {
      if (op->HasAttr("use_mkldnn")) {
        op->SetAttr("use_mkldnn", true);
      }
    }
  }
#else
  LOG(WARNING)
      << "'MKLDNN' is not supported; please re-compile with the WITH_MKLDNN "
         "option";
#endif
}
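
// A hedged sketch: FLAGS_use_mkldnn is the gflag defined near the top of
// this file and is checked by Executor::Run before calling EnableMKLDNN, so
// a caller may toggle it programmatically before running:
//
//   FLAGS_use_mkldnn = true;
//   exe.Run(program, &scope, /*block_id=*/0);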
}  // namespace framework
}  // namespace paddle