/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/framework/executor.h"
#include <deque>
#include <memory>
#include <unordered_map>
#include <unordered_set>
#include <utility>
#include "google/protobuf/io/zero_copy_stream_impl.h"
#include "google/protobuf/message.h"
#include "google/protobuf/text_format.h"
#include "paddle/fluid/framework/feed_fetch_method.h"
#include "paddle/fluid/framework/lod_rank_table.h"
#include "paddle/fluid/framework/lod_tensor_array.h"
#include "paddle/fluid/framework/op_registry.h"
#include "paddle/fluid/framework/reader.h"
#include "paddle/fluid/framework/trainer_desc.pb.h"
#include "paddle/fluid/framework/trainer_factory.h"
#include "paddle/fluid/framework/transfer_scope_cache.h"
#include "paddle/fluid/framework/variable_helper.h"
#include "paddle/fluid/operators/controlflow/recurrent_op_helper.h"
#include "paddle/fluid/operators/controlflow/while_op_helper.h"
#include "paddle/fluid/operators/distributed/distributed.h"
#include "paddle/fluid/platform/place.h"
#include "paddle/fluid/platform/profiler.h"

#ifdef PADDLE_WITH_NGRAPH
#include "paddle/fluid/operators/ngraph/ngraph_engine.h"
DEFINE_bool(use_ngraph, false, "Use NGRAPH to run");
#endif

DECLARE_bool(benchmark);
DEFINE_bool(use_mkldnn, false, "Use MKLDNN to run");

namespace paddle {
namespace framework {
namespace {
// Block ids start from 0. kProgramId is a pseudo id that represents the
// code block wrapping the whole program (i.e., the outermost block 0).
int kProgramId = -1;
}  // namespace

ExecutorPrepareContext::ExecutorPrepareContext(
    const framework::ProgramDesc& prog, size_t block_id)
    : prog_(prog), block_id_(block_id) {}

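// Collect the variables that can be garbage-collected after each op runs.
// Skips the analysis entirely when eager deletion is disabled or when gc is
// forcibly disabled for this context.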
void ExecutorPrepareContext::PrepareUnusedVars(
    const std::vector<std::string>& keep_vars, bool force_disable_gc) {
  force_disable_gc_ = force_disable_gc;
  if (GetEagerDeletionThreshold() < 0 || force_disable_gc_) {
    return;
  }
  unused_vars_ = GetUnusedVars(prog_.Block(block_id_), ops_, keep_vars);
}

ExecutorPrepareContext::~ExecutorPrepareContext() {
  VLOG(5) << "destroy ExecutorPrepareContext";
}

Executor::Executor(const platform::Place& place) : place_(place) {}

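// Notify the distributed runtime that this trainer has finished by sending a
// complete message through the RPC client. Compiled to a no-op when Paddle is
// built without PADDLE_WITH_DISTRIBUTE.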
void Executor::Close() {
#ifdef PADDLE_WITH_DISTRIBUTE
  // TODO(typhoonzero): the complete message will need to use the real
  // trainer_id rather than the hard-coded 0.
  auto client =
      paddle::operators::distributed::RPCClient::GetInstance<RPCCLIENT_T>(0);
  client->SendComplete();
#endif
}

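// Create all variables declared in the given block: persistable variables are
// created in the outermost (root) scope so they outlive a single run, while
// non-persistable variables are created in the local scope. If the scope has
// no parent, every variable is created directly in it.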
void Executor::CreateVariables(const ProgramDesc& pdesc, Scope* scope,
                               int block_id) {
  auto& global_block = pdesc.Block(block_id);

  const Scope* ancestor_scope = scope;
  while (ancestor_scope->parent()) {
    ancestor_scope = ancestor_scope->parent();
  }

  if (ancestor_scope != scope) {
    for (auto& var : global_block.AllVars()) {
      if (var->Name() == framework::kEmptyVarName) {
        continue;
      }

      if (var->Persistable()) {
        auto* ptr = const_cast<Scope*>(ancestor_scope)->Var(var->Name());
        InitializeVariable(ptr, var->GetType());
        VLOG(3) << "Create Variable " << var->Name()
                << " global, which pointer is " << ptr;
      } else {
        auto* ptr = scope->Var(var->Name());
        InitializeVariable(ptr, var->GetType());
        VLOG(3) << "Create Variable " << var->Name()
                << " locally, which pointer is " << ptr;
      }
    }
  } else {
    for (auto& var : global_block.AllVars()) {
      auto* ptr = scope->Var(var->Name());
      InitializeVariable(ptr, var->GetType());
      VLOG(3) << "Create variable " << var->Name() << ", which pointer is "
              << ptr;
    }
  }
}

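// Run training driven by a Dataset: parse the serialized TrainerDesc, create
// the corresponding trainer through TrainerFactory, initialize its training
// and helper environments, run it to completion, and finalize it.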
void Executor::RunFromDataset(const ProgramDesc& main_program, Scope* scope,
                              Dataset* dataset,
                              const std::string& trainer_desc_str) {
  VLOG(3) << "Start to RunFromDataset in executor";
  TrainerDesc trainer_desc;
  bool success = trainer_desc.ParseFromString(trainer_desc_str);
  PADDLE_ENFORCE(success, "Failed to parse TrainerDesc from string:\n%s",
                 trainer_desc_str.c_str());
  VLOG(3) << "Going to create trainer, trainer class is "
          << trainer_desc.class_name();
  std::shared_ptr<TrainerBase> trainer;
  trainer = TrainerFactory::CreateTrainer(trainer_desc.class_name());
  // initialize trainer
  VLOG(3) << "Going to initialize trainer";
  trainer->Initialize(trainer_desc, dataset);
  VLOG(3) << "Set root scope here";
  trainer->SetScope(scope);
  // prepare training environment and helper environment
  VLOG(3) << "Try to init train environment";
  trainer->InitTrainerEnv(main_program, place_);
  VLOG(3) << "Try to init other environment";
  trainer->InitOtherEnv(main_program);
  // training and finalize training
  VLOG(3) << "Trainer starts to run";
  trainer->Run();
  VLOG(3) << "Trainer going to finalize";
  trainer->Finalize();
  return;
}

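// Run a single block of the program: prepare an execution context for
// block_id (optionally skipping reference-count/GC analysis for some
// variables) and execute it in the given scope.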
void Executor::Run(const ProgramDesc& pdesc, Scope* scope, int block_id,
                   bool create_local_scope, bool create_vars,
                   const std::vector<std::string>& skip_ref_cnt_vars,
                   bool force_disable_gc) {
  platform::RecordBlock b(block_id);
  if (FLAGS_use_mkldnn) EnableMKLDNN(pdesc);
  auto ctx = Prepare(pdesc, block_id, skip_ref_cnt_vars, force_disable_gc);
  RunPreparedContext(ctx.get(), scope, create_local_scope, create_vars);
}

// Check whether the block already has feed operators and feed_holder.
// Return false if the block does not have any feed operators.
// If some feed operators have been prepended to the block, check that
// the info contained in these feed operators matches the feed_targets
// and feed_holder_name. Raise exception when any mismatch is found.
// Return true if the block has feed operators and holder of matching info.
static bool has_feed_operators(
    const BlockDesc& block,
    const std::map<std::string, const LoDTensor*>& feed_targets,
    const std::string& feed_holder_name) {
  size_t feed_count = 0;
  for (auto* op : block.AllOps()) {
    if (op->Type() == kFeedOpType) {
      feed_count++;
      // The input variable's name of feed_op should be feed_holder_name.
      PADDLE_ENFORCE_EQ(op->Input("X")[0], feed_holder_name,
                        "Input to feed op should be '%s'", feed_holder_name);
      std::string feed_target_name = op->Output("Out")[0];
      PADDLE_ENFORCE(
          feed_targets.find(feed_target_name) != feed_targets.end(),
          "Feed operator output name '%s' cannot be found in 'feed_targets'",
          feed_target_name);
    }
  }

  if (feed_count > 0) {
    PADDLE_ENFORCE_EQ(
        feed_count, feed_targets.size(),
        "The number of feed operators should match 'feed_targets'");

    if (!feed_holder_name.empty()) {
      // When feed operators are present, a feed_holder variable should be
      // present as well.
      auto var = block.FindVar(feed_holder_name);
      PADDLE_ENFORCE_NOT_NULL(var, "Block should already have a '%s' variable",
                              feed_holder_name);
      PADDLE_ENFORCE_EQ(var->GetType(), proto::VarType::FEED_MINIBATCH,
                        "'%s' variable should be 'FEED_MINIBATCH' type",
                        feed_holder_name);
    }
  }

  return feed_count > 0;
}

// Check whether the block already has fetch operators and fetch_holder.
// Return false if the block does not have any fetch operators.
// If some fetch operators have been appended to the block, check that
// the info contained in these fetch operators matches the fetch_targets
// and fetch_holder_name. Raise exception when any mismatch is found.
// Return true if the block has fetch operators and holder of matching info.
static bool has_fetch_operators(
    const BlockDesc& block,
    const std::map<std::string, LoDTensor*>& fetch_targets,
    const std::string& fetch_holder_name) {
  size_t fetch_count = 0;
  for (auto* op : block.AllOps()) {
    if (op->Type() == kFetchOpType) {
      fetch_count++;
      // The output variable's name of fetch_op should be fetch_holder_name.
      PADDLE_ENFORCE_EQ(op->Output("Out")[0], fetch_holder_name,
                        "Output of fetch op should be '%s'", fetch_holder_name);
      std::string fetch_target_name = op->Input("X")[0];
      PADDLE_ENFORCE(
          fetch_targets.find(fetch_target_name) != fetch_targets.end(),
          "Fetch operator input name '%s' cannot be found in 'fetch_targets'",
          fetch_target_name);
    }
  }

  if (fetch_count > 0) {
    PADDLE_ENFORCE_EQ(
        fetch_count, fetch_targets.size(),
        "The number of fetch operators should match 'fetch_targets'");

    if (!fetch_holder_name.empty()) {
      // When fetch operators are present, a fetch_holder variable should be
      // present as well.
      auto var = block.FindVar(fetch_holder_name);
      PADDLE_ENFORCE_NOT_NULL(var, "Block should already have a '%s' variable",
                              fetch_holder_name);
      PADDLE_ENFORCE_EQ(var->GetType(), proto::VarType::FETCH_LIST,
                        "'%s' variable should be 'FETCH_LIST' type",
                        fetch_holder_name);
    }
  }

  return fetch_count > 0;
}

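// Thin wrapper around Prepare(); the returned context can be cached by the
// caller and reused across runs with RunPreparedContext().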
std::unique_ptr<ExecutorPrepareContext> Executor::PrepareCtxCache(
    const ProgramDesc& program, int block_id,
    const std::vector<std::string>& skip_ref_cnt_vars, bool force_disable_gc) {
  return Prepare(program, block_id, skip_ref_cnt_vars, force_disable_gc);
}

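// Run the program with explicit feed/fetch targets. If block 0 does not
// already contain matching feed/fetch ops, the program is copied and the
// missing feed ops are prepended (and fetch ops appended) before execution.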
void Executor::Run(const ProgramDesc& program, Scope* scope,
                   std::map<std::string, const LoDTensor*>* feed_targets,
                   std::map<std::string, LoDTensor*>* fetch_targets,
                   bool create_local_scope, bool create_vars,
                   const std::string& feed_holder_name,
                   const std::string& fetch_holder_name) {
  platform::RecordBlock b(kProgramId);
  if (FLAGS_use_mkldnn) EnableMKLDNN(program);
  bool has_feed_ops =
      has_feed_operators(program.Block(0), *feed_targets, feed_holder_name);
  bool has_fetch_ops =
      has_fetch_operators(program.Block(0), *fetch_targets, fetch_holder_name);

  ProgramDesc* copy_program = const_cast<ProgramDesc*>(&program);
  std::unique_ptr<ProgramDesc> unique_ptr_of_copy_program;
  if (!has_feed_ops || !has_fetch_ops) {
    unique_ptr_of_copy_program.reset(new ProgramDesc(program));
    copy_program = unique_ptr_of_copy_program.get();
  }
  auto* global_block = copy_program->MutableBlock(0);

  if (!has_feed_ops) {
    // create feed_holder variable
    auto* feed_holder = global_block->Var(feed_holder_name);
    feed_holder->SetType(proto::VarType::FEED_MINIBATCH);
    feed_holder->SetPersistable(true);

    int i = 0;
    for (auto& feed_target : (*feed_targets)) {
      std::string var_name = feed_target.first;
      VLOG(3) << "feed target's name: " << var_name;

      // prepend feed op
      auto* op = global_block->PrependOp();
      op->SetType(kFeedOpType);
      op->SetInput("X", {feed_holder_name});
      op->SetOutput("Out", {var_name});
      op->SetAttr("col", {static_cast<int>(i)});
      op->CheckAttrs();

      i++;
    }
  }

  if (!has_fetch_ops) {
    // create fetch_holder variable
    auto* fetch_holder = global_block->Var(fetch_holder_name);
    fetch_holder->SetType(proto::VarType::FETCH_LIST);
    fetch_holder->SetPersistable(true);

    int i = 0;
    for (auto& fetch_target : (*fetch_targets)) {
      std::string var_name = fetch_target.first;
      VLOG(3) << "fetch target's name: " << var_name;

      // append fetch op
      auto* op = global_block->AppendOp();
      op->SetType(kFetchOpType);
      op->SetInput("X", {var_name});
      op->SetOutput("Out", {fetch_holder_name});
      op->SetAttr("col", {static_cast<int>(i)});
      op->CheckAttrs();

      i++;
    }
  }

  auto ctx = Prepare(*copy_program, 0);
  RunPreparedContext(ctx.get(), scope, feed_targets, fetch_targets,
                     create_local_scope, create_vars, feed_holder_name,
                     fetch_holder_name);
}

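// Build an ExecutorPrepareContext for one block: instantiate all of its
// operators, optionally fuse them into an nGraph engine op (FLAGS_use_ngraph),
// and precompute which variables can be deleted eagerly.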
std::unique_ptr<ExecutorPrepareContext> Executor::Prepare(
    const ProgramDesc& program, int block_id,
    const std::vector<std::string>& skip_ref_cnt_vars, bool force_disable_gc) {
  std::unique_ptr<ExecutorPrepareContext> ctx(
      new ExecutorPrepareContext(program, block_id));
  PADDLE_ENFORCE_LT(static_cast<size_t>(block_id), program.Size());
  auto& block = program.Block(block_id);
  for (auto& op_desc : block.AllOps()) {
    ctx->ops_.push_back(OpRegistry::CreateOp(*op_desc));
  }
#ifdef PADDLE_WITH_NGRAPH
  if (FLAGS_use_ngraph && ctx->block_id_ == 0) {
    paddle::operators::NgraphEngine::FuseNgraphOps(
        ctx->prog_.Block(ctx->block_id_), &ctx->ops_);
  }
#endif
  ctx->PrepareUnusedVars(skip_ref_cnt_vars, force_disable_gc);
  return ctx;
}

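// Batch version of Prepare(): build one context per requested block id,
// applying the matching per-block skip list when one is provided.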
std::vector<std::shared_ptr<ExecutorPrepareContext>> Executor::Prepare(
    const ProgramDesc& program, const std::vector<int>& block_ids,
    const std::vector<std::vector<std::string>>& skip_ref_cnt_vars,
    bool force_disable_gc) {
  PADDLE_ENFORCE(
      skip_ref_cnt_vars.empty() || skip_ref_cnt_vars.size() == block_ids.size(),
      "skip_ref_cnt_vars should be either empty or equal to the block number %d",
      block_ids.size());
  std::vector<std::shared_ptr<ExecutorPrepareContext>> result;
  size_t idx = 0;
  for (auto& bid : block_ids) {
    PADDLE_ENFORCE_LT(static_cast<size_t>(bid), program.Size());
    auto* ctx = new ExecutorPrepareContext(program, bid);
    auto& block = program.Block(bid);
    for (auto& op_desc : block.AllOps()) {
      ctx->ops_.push_back(OpRegistry::CreateOp(*op_desc));
    }
    if (skip_ref_cnt_vars.empty()) {
      ctx->PrepareUnusedVars(std::vector<std::string>(), force_disable_gc);
    } else {
      ctx->PrepareUnusedVars(skip_ref_cnt_vars[idx], force_disable_gc);
    }
    result.push_back(std::shared_ptr<ExecutorPrepareContext>(ctx));
    ++idx;
  }
  return result;
}

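// Execute a prepared block: optionally create a local scope and the block's
// variables, set up a garbage collector according to the eager-deletion
// settings, run every op in order (deleting unused tensors as we go), and
// finally clean up the local scope / kid scopes.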
void Executor::RunPreparedContext(ExecutorPrepareContext* ctx, Scope* scope,
                                  bool create_local_scope, bool create_vars,
                                  bool keep_kids) {
  platform::RecordBlock b(kProgramId);
  PADDLE_ENFORCE_NOT_NULL(scope);
  Scope* local_scope = scope;
  if (create_vars) {
    if (create_local_scope) {
      local_scope = &scope->NewScope();
    }
    CreateVariables(ctx->prog_, local_scope, ctx->block_id_);
  }

  int64_t max_memory_size = GetEagerDeletionThreshold();
  std::unique_ptr<GarbageCollector> gc;
  // FIXME(zjl): recurrent_op is rather complex, so we forcibly disable gc
  // in recurrent_op.
  if (!ctx->force_disable_gc_ && max_memory_size >= 0) {
#ifdef PADDLE_WITH_CUDA
    if (platform::is_gpu_place(place_)) {
      if (IsFastEagerDeletionModeEnabled()) {
        gc.reset(new UnsafeFastGPUGarbageCollector(
            boost::get<platform::CUDAPlace>(place_), max_memory_size));
      } else {
        gc.reset(new DefaultStreamGarbageCollector(
            boost::get<platform::CUDAPlace>(place_), max_memory_size));
      }
    } else if (platform::is_cpu_place(place_)) {
#endif
      gc.reset(new CPUGarbageCollector(boost::get<platform::CPUPlace>(place_),
                                       max_memory_size));
#ifdef PADDLE_WITH_CUDA
    }
#endif
    // If gc is enabled and the program has more than one block
    if (gc && ctx->prog_.Size() > 1) {
      operators::PrepareSafeEagerDeletionOnWhileOpAndWhileGradOp(ctx->block_id_,
                                                                 ctx->ops_);
      operators::PrepareSafeEagerDeletionOnRecurrentOpAndRecurrentGradOp(
          ctx->block_id_, ctx->ops_);
    }
  }

  for (auto& op : ctx->ops_) {
    op->Run(*local_scope, place_);
    if (gc) {
      DeleteUnusedTensors(*local_scope, op.get(), ctx->unused_vars_, gc.get());
    }
  }

  platform::DeviceContextPool::Instance().Get(place_)->Wait();

  if (local_scope != scope) {
    scope->DeleteScope(local_scope);
  } else {
    if (!keep_kids) {
      // By default, we should delete all kid scopes after the executor runs,
      // because some operators may create local scopes while running, such as
      // while_op. But when while_op also creates a local executor to run its
      // sub-block, the sub scopes it created should not be dropped
      // immediately, because while_grad_op will use some variables created
      // during the while_op run, so we need to keep the kids and wait for the
      // outer executor to drop them.
      scope->DropKids();
    }
  }
}

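// Feed/fetch-aware variant: copy the feed tensors into the feed_holder
// variable, run the prepared block, then read the results back from the
// fetch_holder variable into fetch_targets.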
void Executor::RunPreparedContext(
    ExecutorPrepareContext* ctx, Scope* scope,
    std::map<std::string, const LoDTensor*>* feed_targets,
    std::map<std::string, LoDTensor*>* fetch_targets, bool create_local_scope,
    bool create_vars, const std::string& feed_holder_name,
    const std::string& fetch_holder_name) {
  auto& global_block = ctx->prog_.Block(ctx->block_id_);

  PADDLE_ENFORCE(
      has_feed_operators(global_block, *feed_targets, feed_holder_name),
      "Program in ExecutorPrepareContext should have feed_ops.");
  PADDLE_ENFORCE(
      has_fetch_operators(global_block, *fetch_targets, fetch_holder_name),
      "Program in the prepared context should have fetch_ops.");

  // map the data of feed_targets to feed_holder
  for (auto* op : global_block.AllOps()) {
    if (op->Type() == kFeedOpType) {
      std::string feed_target_name = op->Output("Out")[0];
      int idx = boost::get<int>(op->GetAttr("col"));
      SetFeedVariable(scope, *(*feed_targets)[feed_target_name],
                      feed_holder_name, idx);
    }
  }

  RunPreparedContext(ctx, scope, create_local_scope, create_vars);

  // obtain the data of fetch_targets from fetch_holder
  for (auto* op : global_block.AllOps()) {
    if (op->Type() == kFetchOpType) {
      std::string fetch_target_name = op->Input("X")[0];
      int idx = boost::get<int>(op->GetAttr("col"));
      *(*fetch_targets)[fetch_target_name] =
          GetFetchVariable(*scope, fetch_holder_name, idx);
    }
  }
}

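// Turn on the use_mkldnn attribute for every op that supports it, in every
// block of the program. Logs a warning when Paddle was built without MKLDNN.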
void Executor::EnableMKLDNN(const ProgramDesc& program) {
#ifdef PADDLE_WITH_MKLDNN
  VLOG(3) << "use_mkldnn=True";
  for (size_t bid = 0; bid < program.Size(); ++bid) {
    auto* block = const_cast<ProgramDesc&>(program).MutableBlock(bid);
    for (auto* op : block->AllOps()) {
      if (op->HasAttr("use_mkldnn")) {
        op->SetAttr("use_mkldnn", true);
      }
    }
  }
#else
  LOG(WARNING)
      << "'MKLDNN' is not supported. Please re-compile with the WITH_MKLDNN option";
#endif
}
}  // namespace framework
}  // namespace paddle