/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/framework/executor.h"
#include <deque>
#include <memory>
#include <unordered_map>
#include <unordered_set>
#include <utility>

#include "paddle/fluid/framework/feed_fetch_method.h"
#include "paddle/fluid/framework/lod_rank_table.h"
#include "paddle/fluid/framework/lod_tensor_array.h"
#include "paddle/fluid/framework/op_registry.h"
#include "paddle/fluid/framework/reader.h"
#include "paddle/fluid/framework/threadpool.h"
#include "paddle/fluid/framework/transfer_scope_cache.h"
#include "paddle/fluid/framework/variable_helper.h"
#include "paddle/fluid/operators/controlflow/while_op_helper.h"
#include "paddle/fluid/operators/distributed/distributed.h"
#include "paddle/fluid/platform/place.h"
#include "paddle/fluid/platform/profiler.h"

#ifdef PADDLE_WITH_NGRAPH
#include "paddle/fluid/operators/ngraph/ngraph_engine.h"
#endif

DECLARE_bool(benchmark);
DEFINE_bool(use_mkldnn, false, "Use MKLDNN to run");
DEFINE_bool(use_ngraph, false, "Use NGRAPH to run");

namespace paddle {
namespace framework {
namespace {
// Block ids start from 0. kProgramId (-1) represents the pseudo code block
// that wraps block 0, i.e. the whole program.
int kProgramId = -1;
}  // namespace

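// Count how many ops in `block` reference each non-persistable LOD_TENSOR,
// SELECTED_ROWS, or LOD_TENSOR_ARRAY variable (both inputs and outputs
// count). Variables in `skip_var_list` are ignored. These counts later
// drive eager deletion of variables that are no longer referenced.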
static std::unordered_map<std::string, size_t> GetNonPersistableReferenceCounts(
    const BlockDesc& block, const std::vector<std::string>& skip_var_list) {
  std::unordered_map<std::string, size_t> ref_cnts;
  std::unordered_set<std::string> skip_vars(skip_var_list.begin(),
                                            skip_var_list.end());

  auto update_ref_cnts = [&](OpDesc* op_desc, const VariableNameMap& name_map) {
    for (auto& name_pair : name_map) {
      for (auto& name : name_pair.second) {
        if (skip_vars.count(name)) continue;
        auto* var_desc = block.FindVar(name);
        if (var_desc == nullptr || var_desc->Persistable()) continue;
        auto type = var_desc->Proto()->type().type();
        if (type != proto::VarType::LOD_TENSOR &&
            type != proto::VarType::SELECTED_ROWS &&
            type != proto::VarType::LOD_TENSOR_ARRAY) {
          continue;
        }
        ++ref_cnts[name];
      }
    }
  };

  for (auto op_desc : block.AllOps()) {
    update_ref_cnts(op_desc, op_desc->Inputs());
    update_ref_cnts(op_desc, op_desc->Outputs());
  }
  return ref_cnts;
}

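// When eager deletion is enabled (threshold >= 0), precompute the reference
// counts of the block's non-persistable variables at prepare time.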
ExecutorPrepareContext::ExecutorPrepareContext(
    const framework::ProgramDesc& prog, size_t block_id,
    const std::vector<std::string>& skip_ref_cnt_vars)
    : prog_(prog), block_id_(block_id) {
  if (GetEagerDeletionThreshold() >= 0) {
    global_ref_cnts_ = GetNonPersistableReferenceCounts(prog.Block(block_id),
                                                        skip_ref_cnt_vars);
  }
}

ExecutorPrepareContext::~ExecutorPrepareContext() {
  VLOG(5) << "destroy ExecutorPrepareContext";
}

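// Decrease the reference count of every variable touched by `op`; when a
// count reaches zero, hand the variable's underlying memory over to the
// garbage collector `gc` for eager deletion.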
static void DeleteUnusedTensors(
    const Scope& scope, const OperatorBase* op, GarbageCollector* gc,
    std::unordered_map<std::string, size_t>* ref_cnts) {
  std::deque<std::shared_ptr<memory::Allocation>> garbages;

  auto handler = [&](const VariableNameMap& name_map) {
    for (auto& name_pair : name_map) {
      for (auto& name : name_pair.second) {
        auto it = ref_cnts->find(name);
        if (it == ref_cnts->end()) continue;
        if (--(it->second) != 0) {
          continue;
        }
        auto* var = scope.FindVar(name);
        if (var == nullptr) {
          continue;
        }

        VLOG(2) << "Erase variable " << name;
        if (var->IsType<LoDTensor>()) {
          garbages.emplace_back(
              var->GetMutable<LoDTensor>()->MoveMemoryHolder());
        } else if (var->IsType<SelectedRows>()) {
          garbages.emplace_back(var->GetMutable<SelectedRows>()
                                    ->mutable_value()
                                    ->MoveMemoryHolder());
        } else if (var->IsType<LoDTensorArray>()) {
          auto* lod_tensor_arr = var->GetMutable<LoDTensorArray>();
          for (auto& t : *lod_tensor_arr) {
            garbages.emplace_back(t.MoveMemoryHolder());
          }
        } else {
          PADDLE_THROW("Type %s of %s is not supported for eager deletion",
                       framework::ToTypeName(var->Type()), name);
        }
      }
    }
  };

  handler(op->Inputs());
  handler(op->Outputs());

  if (!garbages.empty()) {
    gc->Add(std::move(garbages));
  }
}

Executor::Executor(const platform::Place& place) : place_(place) {}

void Executor::Close() {
#ifdef PADDLE_WITH_DISTRIBUTE
  // TODO(typhoonzero): the complete message will need to use the real
  // trainer_id instead of the hard-coded 0.
  auto client =
      paddle::operators::distributed::RPCClient::GetInstance<RPCCLIENT_T>(0);
  client->SendComplete();
#endif
}

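// Create all variables of block `block_id` in `pdesc`. When `scope` has
// ancestors, persistable variables are created in the root scope so that
// they survive across runs, while the rest are created in the local scope.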
void Executor::CreateVariables(const ProgramDesc& pdesc, Scope* scope,
                               int block_id) {
  auto& global_block = pdesc.Block(block_id);

  const Scope* ancestor_scope = scope;
  while (ancestor_scope->parent()) {
    ancestor_scope = ancestor_scope->parent();
  }

  if (ancestor_scope != scope) {
    for (auto& var : global_block.AllVars()) {
      if (var->Name() == framework::kEmptyVarName) {
        continue;
      }

      if (var->Persistable()) {
        auto* ptr = const_cast<Scope*>(ancestor_scope)->Var(var->Name());
        InitializeVariable(ptr, var->GetType());
        VLOG(3) << "Create Variable " << var->Name()
                << " global, which pointer is " << ptr;
      } else {
        auto* ptr = scope->Var(var->Name());
        InitializeVariable(ptr, var->GetType());
        VLOG(3) << "Create Variable " << var->Name()
                << " locally, which pointer is " << ptr;
      }
    }
  } else {
    for (auto& var : global_block.AllVars()) {
      auto* ptr = scope->Var(var->Name());
      InitializeVariable(ptr, var->GetType());
      VLOG(3) << "Create variable " << var->Name() << ", which pointer is "
              << ptr;
    }
  }
}

void Executor::Run(const ProgramDesc& pdesc, Scope* scope, int block_id,
                   bool create_local_scope, bool create_vars) {
  platform::RecordBlock b(block_id);
  if (FLAGS_use_mkldnn) EnableMKLDNN(pdesc);
#ifdef PADDLE_WITH_NGRAPH
  if (FLAGS_use_ngraph) operators::NgraphEngine::EnableNgraph(pdesc);
#endif
  auto ctx = Prepare(pdesc, block_id);
  RunPreparedContext(ctx.get(), scope, create_local_scope, create_vars);
}

// Check whether the block already has feed operators and a feed_holder.
// Return false if the block does not have any feed operators.
// If some feed operators have been prepended to the block, check that
// the info contained in these feed operators matches the feed_targets
// and feed_holder_name. Raise an exception when any mismatch is found.
// Return true if the block has feed operators and a feed_holder of
// matching info.
static bool has_feed_operators(
    const BlockDesc& block,
    const std::map<std::string, const LoDTensor*>& feed_targets,
    const std::string& feed_holder_name) {
  size_t feed_count = 0;
  for (auto* op : block.AllOps()) {
    if (op->Type() == kFeedOpType) {
      feed_count++;
      // The input variable name of the feed_op should be feed_holder_name.
      PADDLE_ENFORCE_EQ(op->Input("X")[0], feed_holder_name,
                        "Input to feed op should be '%s'", feed_holder_name);
      std::string feed_target_name = op->Output("Out")[0];
      PADDLE_ENFORCE(
          feed_targets.find(feed_target_name) != feed_targets.end(),
          "Feed operator output name '%s' cannot be found in 'feed_targets'",
          feed_target_name);
    }
  }

  if (feed_count > 0) {
    PADDLE_ENFORCE_EQ(
        feed_count, feed_targets.size(),
        "The number of feed operators should match 'feed_targets'");

    if (!feed_holder_name.empty()) {
      // When feed operators are present, so should be the feed_holder.
      auto var = block.FindVar(feed_holder_name);
      PADDLE_ENFORCE_NOT_NULL(var, "Block should already have a '%s' variable",
                              feed_holder_name);
      PADDLE_ENFORCE_EQ(var->GetType(), proto::VarType::FEED_MINIBATCH,
                        "'%s' variable should be 'FEED_MINIBATCH' type",
                        feed_holder_name);
    }
  }

  return feed_count > 0;
}

// Check whether the block already has fetch operators and a fetch_holder.
// Return false if the block does not have any fetch operators.
// If some fetch operators have been appended to the block, check that
// the info contained in these fetch operators matches the fetch_targets
// and fetch_holder_name. Raise an exception when any mismatch is found.
// Return true if the block has fetch operators and a fetch_holder of
// matching info.
static bool has_fetch_operators(
    const BlockDesc& block,
    const std::map<std::string, LoDTensor*>& fetch_targets,
    const std::string& fetch_holder_name) {
  size_t fetch_count = 0;
  for (auto* op : block.AllOps()) {
    if (op->Type() == kFetchOpType) {
      fetch_count++;
      // The output variable name of the fetch_op should be fetch_holder_name.
      PADDLE_ENFORCE_EQ(op->Output("Out")[0], fetch_holder_name,
                        "Output of fetch op should be '%s'", fetch_holder_name);
      std::string fetch_target_name = op->Input("X")[0];
      PADDLE_ENFORCE(
          fetch_targets.find(fetch_target_name) != fetch_targets.end(),
          "Fetch operator input name '%s' cannot be found in 'fetch_targets'",
          fetch_target_name);
    }
  }

  if (fetch_count > 0) {
    PADDLE_ENFORCE_EQ(
        fetch_count, fetch_targets.size(),
        "The number of fetch operators should match 'fetch_targets'");

    if (!fetch_holder_name.empty()) {
      // When fetch operators are present, so should be the fetch_holder.
      auto var = block.FindVar(fetch_holder_name);
      PADDLE_ENFORCE_NOT_NULL(var, "Block should already have a '%s' variable",
                              fetch_holder_name);
      PADDLE_ENFORCE_EQ(var->GetType(), proto::VarType::FETCH_LIST,
                        "'%s' variable should be 'FETCH_LIST' type",
                        fetch_holder_name);
    }
  }

  return fetch_count > 0;
}

void Executor::Run(const ProgramDesc& program, Scope* scope,
                   std::map<std::string, const LoDTensor*>* feed_targets,
                   std::map<std::string, LoDTensor*>* fetch_targets,
                   bool create_local_scope, bool create_vars,
                   const std::string& feed_holder_name,
                   const std::string& fetch_holder_name) {
  platform::RecordBlock b(kProgramId);
  if (FLAGS_use_mkldnn) EnableMKLDNN(program);
  bool has_feed_ops =
      has_feed_operators(program.Block(0), *feed_targets, feed_holder_name);
  bool has_fetch_ops =
      has_fetch_operators(program.Block(0), *fetch_targets, fetch_holder_name);

  ProgramDesc* copy_program = const_cast<ProgramDesc*>(&program);
  std::unique_ptr<ProgramDesc> unique_ptr_of_copy_program;
  if (!has_feed_ops || !has_fetch_ops) {
    unique_ptr_of_copy_program.reset(new ProgramDesc(program));
    copy_program = unique_ptr_of_copy_program.get();
  }
  auto* global_block = copy_program->MutableBlock(0);

  if (!has_feed_ops) {
    // create feed_holder variable
    auto* feed_holder = global_block->Var(feed_holder_name);
    feed_holder->SetType(proto::VarType::FEED_MINIBATCH);
    feed_holder->SetPersistable(true);

    int i = 0;
    for (auto& feed_target : (*feed_targets)) {
      std::string var_name = feed_target.first;
      VLOG(3) << "feed target's name: " << var_name;

      // prepend feed op
      auto* op = global_block->PrependOp();
      op->SetType(kFeedOpType);
      op->SetInput("X", {feed_holder_name});
      op->SetOutput("Out", {var_name});
      op->SetAttr("col", {static_cast<int>(i)});
      op->CheckAttrs();

      i++;
    }
  }

  if (!has_fetch_ops) {
    // create fetch_holder variable
    auto* fetch_holder = global_block->Var(fetch_holder_name);
    fetch_holder->SetType(proto::VarType::FETCH_LIST);
    fetch_holder->SetPersistable(true);

    int i = 0;
    for (auto& fetch_target : (*fetch_targets)) {
      std::string var_name = fetch_target.first;
      VLOG(3) << "fetch target's name: " << var_name;

      // append fetch op
      auto* op = global_block->AppendOp();
      op->SetType(kFetchOpType);
      op->SetInput("X", {var_name});
      op->SetOutput("Out", {fetch_holder_name});
      op->SetAttr("col", {static_cast<int>(i)});
      op->CheckAttrs();

      i++;
    }
  }

  auto ctx = Prepare(*copy_program, 0);
  RunPreparedContext(ctx.get(), scope, feed_targets, fetch_targets,
                     create_local_scope, create_vars, feed_holder_name,
                     fetch_holder_name);
}
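// A minimal usage sketch of the feed/fetch Run overload (illustrative only;
// the fed variable "x", the fetched variable "y", and the tensors bound to
// them are assumptions, not part of this file):
//
//   Executor exe(platform::CPUPlace());
//   Scope scope;
//   LoDTensor x_tensor, y_tensor;
//   std::map<std::string, const LoDTensor*> feeds{{"x", &x_tensor}};
//   std::map<std::string, LoDTensor*> fetches{{"y", &y_tensor}};
//   exe.Run(program, &scope, &feeds, &fetches);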

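// Build an ExecutorPrepareContext for block `block_id`: instantiate each of
// the block's operators once so that repeated runs can reuse them.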
std::unique_ptr<ExecutorPrepareContext> Executor::Prepare(
    const ProgramDesc& program, int block_id,
    const std::vector<std::string>& skip_ref_cnt_vars) {
  std::unique_ptr<ExecutorPrepareContext> ctx(
      new ExecutorPrepareContext(program, block_id, skip_ref_cnt_vars));
  PADDLE_ENFORCE_LT(static_cast<size_t>(block_id), program.Size());
  auto& block = program.Block(block_id);
  for (auto& op_desc : block.AllOps()) {
    ctx->ops_.push_back(OpRegistry::CreateOp(*op_desc));
  }
  return ctx;
}

std::vector<std::shared_ptr<ExecutorPrepareContext>> Executor::Prepare(
    const ProgramDesc& program, const std::vector<int>& block_ids,
    const std::vector<std::vector<std::string>>& skip_ref_cnt_vars) {
  PADDLE_ENFORCE(
      skip_ref_cnt_vars.empty() || skip_ref_cnt_vars.size() == block_ids.size(),
      "skip_ref_cnt_vars should be either empty or equal to the number of "
      "blocks %d",
      block_ids.size());
  std::vector<std::shared_ptr<ExecutorPrepareContext>> result;
  size_t idx = 0;
  for (auto& bid : block_ids) {
    ExecutorPrepareContext* ctx;
    if (skip_ref_cnt_vars.empty()) {
      ctx = new ExecutorPrepareContext(program, bid);
    } else {
      ctx = new ExecutorPrepareContext(program, bid, skip_ref_cnt_vars[idx]);
    }
    PADDLE_ENFORCE_LT(static_cast<size_t>(bid), program.Size());
    auto& block = program.Block(bid);
    for (auto& op_desc : block.AllOps()) {
      ctx->ops_.push_back(OpRegistry::CreateOp(*op_desc));
    }
    result.push_back(std::shared_ptr<ExecutorPrepareContext>(ctx));
    ++idx;
  }
  return result;
}

void Executor::RunPreparedContext(ExecutorPrepareContext* ctx, Scope* scope,
                                  bool create_local_scope, bool create_vars,
                                  bool keep_kids) {
  PADDLE_ENFORCE_NOT_NULL(scope);
  Scope* local_scope = scope;
  if (create_vars) {
    if (create_local_scope) {
      local_scope = &scope->NewScope();
    }
    CreateVariables(ctx->prog_, local_scope, ctx->block_id_);
  }

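  // Eager deletion: when the threshold is non-negative, reset the per-run
  // reference counts and pick a garbage collector matching place_ (a fast
  // unsafe or default-stream collector on GPU, a plain CPU collector
  // otherwise).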
  int64_t max_memory_size = GetEagerDeletionThreshold();
  std::unique_ptr<GarbageCollector> gc;
  if (max_memory_size >= 0) {
    ctx->ResetReferenceCount();
#ifdef PADDLE_WITH_CUDA
    if (platform::is_gpu_place(place_)) {
      if (IsFastEagerDeletionModeEnabled()) {
        gc.reset(new UnsafeFastGPUGarbageCollector(
            boost::get<platform::CUDAPlace>(place_), max_memory_size));
      } else {
        gc.reset(new DefaultStreamGarbageCollector(
            boost::get<platform::CUDAPlace>(place_), max_memory_size));
      }
    } else if (platform::is_cpu_place(place_)) {
#endif
      gc.reset(new CPUGarbageCollector(boost::get<platform::CPUPlace>(place_),
                                       max_memory_size));
#ifdef PADDLE_WITH_CUDA
    }
#endif
    if (gc && keep_kids) {
      operators::PrepareSafeEagerDeletionOnWhileOpAndWhileGradOp(ctx->block_id_,
                                                                 ctx->ops_);
    }
  }

  for (auto& op : ctx->ops_) {
    op->Run(*local_scope, place_);

    if (gc) {
      DeleteUnusedTensors(*local_scope, op.get(), gc.get(),
                          &(ctx->runtime_ref_cnts_));
    }
  }

  platform::DeviceContextPool::Instance().Get(place_)->Wait();

  if (local_scope != scope) {
    scope->DeleteScope(local_scope);
  } else {
    if (!keep_kids) {
      // By default, we delete all kid scopes after the executor runs, because
      // some operators may create local scopes while running, such as
      // while_op. But when while_op also creates a local executor to run its
      // sub-block, the sub-scopes it created should not be dropped
      // immediately, because while_grad_op will use some variables created
      // during the while_op run, so we need to keep the kids and wait for the
      // outer executor to drop them.
      scope->DropKids();
    }
  }
}

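// Variant of RunPreparedContext that also copies `feed_targets` into the
// feed holder before running and reads the results out of the fetch holder
// into `fetch_targets` afterwards.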
void Executor::RunPreparedContext(
    ExecutorPrepareContext* ctx, Scope* scope,
    std::map<std::string, const LoDTensor*>* feed_targets,
    std::map<std::string, LoDTensor*>* fetch_targets, bool create_local_scope,
    bool create_vars, const std::string& feed_holder_name,
    const std::string& fetch_holder_name) {
  auto& global_block = ctx->prog_.Block(ctx->block_id_);

  PADDLE_ENFORCE(
      has_feed_operators(global_block, *feed_targets, feed_holder_name),
      "Program in ExecutorPrepareContext should have feed_ops.");
  PADDLE_ENFORCE(
      has_fetch_operators(global_block, *fetch_targets, fetch_holder_name),
      "Program in the prepared context should have fetch_ops.");

  // map the data of feed_targets to feed_holder
  for (auto* op : global_block.AllOps()) {
    if (op->Type() == kFeedOpType) {
      std::string feed_target_name = op->Output("Out")[0];
      int idx = boost::get<int>(op->GetAttr("col"));
      SetFeedVariable(scope, *(*feed_targets)[feed_target_name],
                      feed_holder_name, idx);
    }
  }

  RunPreparedContext(ctx, scope, create_local_scope, create_vars);

  // obtain the data of fetch_targets from fetch_holder
  for (auto* op : global_block.AllOps()) {
    if (op->Type() == kFetchOpType) {
      std::string fetch_target_name = op->Input("X")[0];
      int idx = boost::get<int>(op->GetAttr("col"));
      *(*fetch_targets)[fetch_target_name] =
          GetFetchVariable(*scope, fetch_holder_name, idx);
    }
  }
}

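// Set the `use_mkldnn` attribute to true for every op in `program` that
// declares it (effective only when compiled with PADDLE_WITH_MKLDNN).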
void Executor::EnableMKLDNN(const ProgramDesc& program) {
#ifdef PADDLE_WITH_MKLDNN
  VLOG(3) << "use_mkldnn=True";
  for (size_t bid = 0; bid < program.Size(); ++bid) {
    auto* block = const_cast<ProgramDesc&>(program).MutableBlock(bid);
    for (auto* op : block->AllOps()) {
      if (op->HasAttr("use_mkldnn")) {
        op->SetAttr("use_mkldnn", true);
      }
    }
  }
#else
  LOG(WARNING) << "'MKLDNN' is not supported. Please re-compile with the "
                  "WITH_MKLDNN option";
#endif
}
}  // namespace framework
}  // namespace paddle