/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/framework/executor.h"
#include <deque>
#include <memory>
#include <string>
#include <unordered_map>
#include <unordered_set>
#include <vector>

#include "paddle/fluid/framework/feed_fetch_method.h"
#include "paddle/fluid/framework/lod_rank_table.h"
#include "paddle/fluid/framework/lod_tensor_array.h"
#include "paddle/fluid/framework/op_registry.h"
#include "paddle/fluid/framework/reader.h"
#include "paddle/fluid/framework/transfer_scope_cache.h"
#include "paddle/fluid/framework/variable_helper.h"
#include "paddle/fluid/operators/detail/macros.h"
#include "paddle/fluid/platform/place.h"
#include "paddle/fluid/platform/profiler.h"

#ifdef PADDLE_WITH_NGRAPH
#include "paddle/fluid/framework/ngraph_operator.h"
#endif

DECLARE_bool(benchmark);
DEFINE_bool(use_mkldnn, false, "Use MKLDNN to run");
DEFINE_bool(use_ngraph, false, "Use NGRAPH to run");

namespace paddle {
namespace framework {
namespace {
// Block ids start from 0. kProgramId is a special id representing the pseudo
// code block that wraps block 0 (the whole program).
int kProgramId = -1;
}  // namespace

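// Counts how many times each non-persistable LOD_TENSOR, SELECTED_ROWS, or
// LOD_TENSOR_ARRAY variable in `block` is referenced by op inputs and
// outputs, skipping the names in `skip_var_list`. These counts seed the
// eager-deletion machinery below: a variable becomes collectible once its
// runtime reference count drops to zero.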
static std::unordered_map<std::string, size_t> GetNonPersistableReferenceCounts(
    const BlockDesc& block, const std::vector<std::string>& skip_var_list) {
  std::unordered_map<std::string, size_t> ref_cnts;
  std::unordered_set<std::string> skip_vars(skip_var_list.begin(),
                                            skip_var_list.end());

  auto update_ref_cnts = [&](OpDesc* op_desc, const VariableNameMap& name_map) {
    for (auto& name_pair : name_map) {
      for (auto& name : name_pair.second) {
        if (skip_vars.count(name)) continue;
        auto* var_desc = block.FindVar(name);
        if (var_desc == nullptr || var_desc->Persistable()) continue;
        auto type = var_desc->Proto()->type().type();
        if (type != proto::VarType::LOD_TENSOR &&
            type != proto::VarType::SELECTED_ROWS &&
            type != proto::VarType::LOD_TENSOR_ARRAY) {
          continue;
        }
        ++ref_cnts[name];
      }
    }
  };

  for (auto op_desc : block.AllOps()) {
    update_ref_cnts(op_desc, op_desc->Inputs());
    update_ref_cnts(op_desc, op_desc->Outputs());
  }
  return ref_cnts;
}

ExecutorPrepareContext::ExecutorPrepareContext(
    const framework::ProgramDesc& prog, size_t block_id,
    const std::vector<std::string>& skip_ref_cnt_vars)
    : prog_(prog), block_id_(block_id) {
  if (GetEagerDeletionThreshold() >= 0) {
    global_ref_cnts_ = GetNonPersistableReferenceCounts(prog.Block(block_id),
                                                        skip_ref_cnt_vars);
  }
}

ExecutorPrepareContext::~ExecutorPrepareContext() {
  VLOG(5) << "destroy ExecutorPrepareContext";
}

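// Called after an op has run when eager deletion is enabled: decrements the
// reference count of every variable the op reads or writes, and hands the
// allocations of variables whose count reaches zero over to the garbage
// collector.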
static void DeleteUnusedTensors(
    const Scope& scope, const OperatorBase* op, GarbageCollector* gc,
    std::unordered_map<std::string, size_t>* ref_cnts) {
  std::deque<std::shared_ptr<memory::Allocation>> garbages;

  auto handler = [&](const VariableNameMap& name_map) {
    for (auto& name_pair : name_map) {
      for (auto& name : name_pair.second) {
        auto it = ref_cnts->find(name);
        if (it == ref_cnts->end()) continue;
        if (--(it->second) != 0) {
          continue;
        }
        auto* var = scope.FindVar(name);
        // The variable may be absent from this scope; skip it instead of
        // dereferencing a null pointer below.
        if (var == nullptr) {
          continue;
        }

        VLOG(2) << "Erase variable " << name;
        if (var->IsType<LoDTensor>()) {
          garbages.emplace_back(var->GetMutable<LoDTensor>()->MoveMemory());
        } else if (var->IsType<SelectedRows>()) {
          garbages.emplace_back(
              var->GetMutable<SelectedRows>()->mutable_value()->MoveMemory());
        } else if (var->IsType<LoDTensorArray>()) {
          auto* lod_tensor_arr = var->GetMutable<LoDTensorArray>();
          for (auto& t : *lod_tensor_arr) {
            garbages.emplace_back(t.MoveMemory());
          }
        } else {
          PADDLE_THROW("Type %s of %s does not support eager deletion",
                       var->Type().name(), name);
        }
      }
    }
  };

  handler(op->Inputs());
  handler(op->Outputs());

  if (!garbages.empty()) {
    gc->Add(std::move(garbages));
  }
}

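// When built with nGraph support, collapses each interval of fusable ops in
// ctx->ops_ into a single NgraphOperator: the first op of the interval is
// replaced by the fused op and the remainder is erased. Intervals are erased
// in reverse order so the iterators of earlier intervals stay valid.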
static void EnableFusedOp(ExecutorPrepareContext* ctx) {
#ifdef PADDLE_WITH_NGRAPH
  VLOG(3) << "use_ngraph=True";
  auto intervals = NgraphOperator::NgraphOpIntervals(&ctx->ops_);
  for (auto& interval : intervals) {
    auto* ng_op = new NgraphOperator(ctx->prog_, ctx->block_id_, interval.at(0),
                                     interval.at(1));
    *interval[0] = std::unique_ptr<OperatorBase>(ng_op);
  }
  for (auto it = intervals.rbegin(); it != intervals.rend(); ++it) {
    ctx->ops_.erase(it->at(0) + 1, it->at(1));
  }
#else
  LOG(WARNING)
      << "'NGRAPH' is not supported; please re-compile with the WITH_NGRAPH "
         "option";
#endif
}

Executor::Executor(const platform::Place& place) : place_(place) {}

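// A minimal usage sketch (illustrative only; `program` stands for a
// caller-built ProgramDesc and is not defined in this file):
//
//   framework::Executor exe(platform::CPUPlace());
//   framework::Scope scope;
//   exe.Run(program, &scope, /*block_id=*/0);
//
// For repeated execution of the same block, Prepare() followed by
// RunPreparedContext() avoids re-creating the operators on every run.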
void Executor::Close() {
#ifdef PADDLE_WITH_DISTRIBUTE
  // TODO(typhoonzero): the complete message will need to use the real
  // trainer_id instead of the hard-coded 0.
  ::paddle::operators::distributed::RPCClient::GetInstance<
      ::paddle::operators::distributed::GRPCClient>(0)
      ->SendComplete();
#endif
}

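// Creates every variable declared in the block: persistable variables are
// created in the outermost ancestor scope so they survive across runs, while
// non-persistable ones are created in the given scope. If the scope has no
// parent, all variables are created locally.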
void Executor::CreateVariables(const ProgramDesc& pdesc, Scope* scope,
                               int block_id) {
  auto& global_block = pdesc.Block(block_id);

  const Scope* ancestor_scope = scope;
  while (ancestor_scope->parent()) {
    ancestor_scope = ancestor_scope->parent();
  }

  if (ancestor_scope != scope) {
    for (auto& var : global_block.AllVars()) {
      if (var->Name() == framework::kEmptyVarName) {
        continue;
      }

      if (var->Persistable()) {
        auto* ptr = const_cast<Scope*>(ancestor_scope)->Var(var->Name());
        InitializeVariable(ptr, var->GetType());
        VLOG(3) << "Create Variable " << var->Name()
                << " global, which pointer is " << ptr;
      } else {
        auto* ptr = scope->Var(var->Name());
        InitializeVariable(ptr, var->GetType());
        VLOG(3) << "Create Variable " << var->Name()
                << " locally, which pointer is " << ptr;
      }
    }
  } else {
    for (auto& var : global_block.AllVars()) {
      auto* ptr = scope->Var(var->Name());
      InitializeVariable(ptr, var->GetType());
      VLOG(3) << "Create variable " << var->Name() << ", which pointer is "
              << ptr;
    }
  }
}

void Executor::Run(const ProgramDesc& pdesc, Scope* scope, int block_id,
                   bool create_local_scope, bool create_vars) {
  platform::RecordBlock b(block_id);
  if (FLAGS_use_mkldnn) EnableMKLDNN(pdesc);
  auto ctx = Prepare(pdesc, block_id);
  RunPreparedContext(ctx.get(), scope, create_local_scope, create_vars);
}

// Check whether the block already has feed operators and feed_holder.
// Return false if the block does not have any feed operators.
// If some feed operators have been prepended to the block, check that
// the info contained in these feed operators matches the feed_targets
// and feed_holder_name. Raise exception when any mismatch is found.
// Return true if the block has feed operators and holder of matching info.
static bool has_feed_operators(
    const BlockDesc& block,
    const std::map<std::string, const LoDTensor*>& feed_targets,
    const std::string& feed_holder_name) {
  size_t feed_count = 0;
  for (auto* op : block.AllOps()) {
    if (op->Type() == kFeedOpType) {
      feed_count++;
      // The input variable of a feed_op should be named feed_holder_name.
      PADDLE_ENFORCE_EQ(op->Input("X")[0], feed_holder_name,
                        "Input to feed op should be '%s'", feed_holder_name);
      std::string feed_target_name = op->Output("Out")[0];
      PADDLE_ENFORCE(
          feed_targets.find(feed_target_name) != feed_targets.end(),
          "Feed operator output name '%s' cannot be found in 'feed_targets'",
          feed_target_name);
    }
  }

  if (feed_count > 0) {
    PADDLE_ENFORCE_EQ(
        feed_count, feed_targets.size(),
        "The number of feed operators should match 'feed_targets'");

    if (!feed_holder_name.empty()) {
      // When feed operators are present, a feed_holder variable should be too.
      auto var = block.FindVar(feed_holder_name);
      PADDLE_ENFORCE_NOT_NULL(var, "Block should already have a '%s' variable",
                              feed_holder_name);
      PADDLE_ENFORCE_EQ(var->GetType(), proto::VarType::FEED_MINIBATCH,
                        "'%s' variable should be 'FEED_MINIBATCH' type",
                        feed_holder_name);
    }
  }

  return feed_count > 0;
}

// Check whether the block already has fetch operators and fetch_holder.
// Return false if the block does not have any fetch operators.
// If some fetch operators have been appended to the block, check that
// the info contained in these fetch operators matches the fetch_targets
// and fetch_holder_name. Raise exception when any mismatch is found.
// Return true if the block has fetch operators and holder of matching info.
static bool has_fetch_operators(
    const BlockDesc& block,
    const std::map<std::string, LoDTensor*>& fetch_targets,
    const std::string& fetch_holder_name) {
  size_t fetch_count = 0;
  for (auto* op : block.AllOps()) {
    if (op->Type() == kFetchOpType) {
      fetch_count++;
      // The output variable of a fetch_op should be named fetch_holder_name.
      PADDLE_ENFORCE_EQ(op->Output("Out")[0], fetch_holder_name,
                        "Output of fetch op should be '%s'", fetch_holder_name);
      std::string fetch_target_name = op->Input("X")[0];
      PADDLE_ENFORCE(
          fetch_targets.find(fetch_target_name) != fetch_targets.end(),
          "Fetch operator input name '%s' cannot be found in 'fetch_targets'",
          fetch_target_name);
    }
  }

  if (fetch_count > 0) {
    PADDLE_ENFORCE_EQ(
        fetch_count, fetch_targets.size(),
        "The number of fetch operators should match 'fetch_targets'");

    if (!fetch_holder_name.empty()) {
      // When fetch operators are present, a fetch_holder variable should be
      // too.
      auto var = block.FindVar(fetch_holder_name);
      PADDLE_ENFORCE_NOT_NULL(var, "Block should already have a '%s' variable",
                              fetch_holder_name);
      PADDLE_ENFORCE_EQ(var->GetType(), proto::VarType::FETCH_LIST,
                        "'%s' variable should be 'FETCH_LIST' type",
                        fetch_holder_name);
    }
  }

  return fetch_count > 0;
}

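// This overload runs block 0 end-to-end, feeding `feed_targets` and filling
// `fetch_targets`. If the program lacks feed/fetch ops, it is copied and the
// ops are inserted automatically. A call sketch (all names illustrative;
// `input` and `output` are caller-owned LoDTensors):
//
//   std::map<std::string, const LoDTensor*> feeds{{"x", &input}};
//   std::map<std::string, LoDTensor*> fetches{{"y", &output}};
//   exe.Run(program, &scope, &feeds, &fetches);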
void Executor::Run(const ProgramDesc& program, Scope* scope,
                   std::map<std::string, const LoDTensor*>* feed_targets,
                   std::map<std::string, LoDTensor*>* fetch_targets,
                   bool create_local_scope, bool create_vars,
                   const std::string& feed_holder_name,
                   const std::string& fetch_holder_name) {
  platform::RecordBlock b(kProgramId);
  if (FLAGS_use_mkldnn) EnableMKLDNN(program);
  bool has_feed_ops =
      has_feed_operators(program.Block(0), *feed_targets, feed_holder_name);
  bool has_fetch_ops =
      has_fetch_operators(program.Block(0), *fetch_targets, fetch_holder_name);

  ProgramDesc* copy_program = const_cast<ProgramDesc*>(&program);
  std::unique_ptr<ProgramDesc> unique_ptr_of_copy_program;
  if (!has_feed_ops || !has_fetch_ops) {
    unique_ptr_of_copy_program.reset(new ProgramDesc(program));
    copy_program = unique_ptr_of_copy_program.get();
  }
  auto* global_block = copy_program->MutableBlock(0);

  if (!has_feed_ops) {
    // create feed_holder variable
    auto* feed_holder = global_block->Var(feed_holder_name);
    feed_holder->SetType(proto::VarType::FEED_MINIBATCH);
    feed_holder->SetPersistable(true);

    int i = 0;
    for (auto& feed_target : (*feed_targets)) {
      std::string var_name = feed_target.first;
      VLOG(3) << "feed target's name: " << var_name;

      // prepend feed op
      auto* op = global_block->PrependOp();
      op->SetType(kFeedOpType);
      op->SetInput("X", {feed_holder_name});
      op->SetOutput("Out", {var_name});
      op->SetAttr("col", {static_cast<int>(i)});
      op->CheckAttrs();

      i++;
    }
  }

  if (!has_fetch_ops) {
    // create fetch_holder variable
    auto* fetch_holder = global_block->Var(fetch_holder_name);
    fetch_holder->SetType(proto::VarType::FETCH_LIST);
    fetch_holder->SetPersistable(true);

    int i = 0;
    for (auto& fetch_target : (*fetch_targets)) {
      std::string var_name = fetch_target.first;
      VLOG(3) << "fetch target's name: " << var_name;

      // append fetch op
      auto* op = global_block->AppendOp();
      op->SetType(kFetchOpType);
      op->SetInput("X", {var_name});
      op->SetOutput("Out", {fetch_holder_name});
      op->SetAttr("col", {static_cast<int>(i)});
      op->CheckAttrs();

      i++;
    }
  }

  auto ctx = Prepare(*copy_program, 0);
  RunPreparedContext(ctx.get(), scope, feed_targets, fetch_targets,
                     create_local_scope, create_vars, feed_holder_name,
                     fetch_holder_name);
}

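// Prepare() builds the operator list for one block ahead of time so the same
// context can be run repeatedly. The reuse pattern, as a sketch (illustrative
// names):
//
//   auto ctx = exe.Prepare(program, /*block_id=*/0);
//   for (int step = 0; step < num_steps; ++step) {
//     exe.RunPreparedContext(ctx.get(), &scope);
//   }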
std::unique_ptr<ExecutorPrepareContext> Executor::Prepare(
    const ProgramDesc& program, int block_id,
    const std::vector<std::string>& skip_ref_cnt_vars) {
  std::unique_ptr<ExecutorPrepareContext> ctx(
      new ExecutorPrepareContext(program, block_id, skip_ref_cnt_vars));
  PADDLE_ENFORCE_LT(static_cast<size_t>(block_id), program.Size());
  auto& block = program.Block(block_id);
  for (auto& op_desc : block.AllOps()) {
    ctx->ops_.push_back(OpRegistry::CreateOp(*op_desc));
  }
  if (FLAGS_use_ngraph) EnableFusedOp(ctx.get());
  return ctx;
}

std::vector<std::shared_ptr<ExecutorPrepareContext>> Executor::Prepare(
    const ProgramDesc& program, const std::vector<int>& block_ids,
    const std::vector<std::vector<std::string>>& skip_ref_cnt_vars) {
  PADDLE_ENFORCE(
      skip_ref_cnt_vars.empty() || skip_ref_cnt_vars.size() == block_ids.size(),
      "skip_ref_cnt_vars should either be empty or match the number of blocks "
      "%d",
      block_ids.size());
  std::vector<std::shared_ptr<ExecutorPrepareContext>> result;
  size_t idx = 0;
  for (auto& bid : block_ids) {
    ExecutorPrepareContext* ctx;
    if (skip_ref_cnt_vars.empty()) {
      ctx = new ExecutorPrepareContext(program, bid);
    } else {
      ctx = new ExecutorPrepareContext(program, bid, skip_ref_cnt_vars[idx]);
    }
    PADDLE_ENFORCE_LT(static_cast<size_t>(bid), program.Size());
    auto& block = program.Block(bid);
    for (auto& op_desc : block.AllOps()) {
      ctx->ops_.push_back(OpRegistry::CreateOp(*op_desc));
    }
    result.push_back(std::shared_ptr<ExecutorPrepareContext>(ctx));
    ++idx;
  }
  return result;
}

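// Runs a prepared block. When eager deletion is enabled (threshold >= 0) and
// keep_kids is false, a garbage collector reclaims each variable as soon as
// its runtime reference count reaches zero. keep_kids=true preserves child
// scopes, which ops such as while_op need across forward and backward runs.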
void Executor::RunPreparedContext(ExecutorPrepareContext* ctx, Scope* scope,
                                  bool create_local_scope, bool create_vars,
                                  bool keep_kids) {
  PADDLE_ENFORCE_NOT_NULL(scope);
  Scope* local_scope = scope;
  if (create_vars) {
    if (create_local_scope) {
      local_scope = &scope->NewScope();
    }
    CreateVariables(ctx->prog_, local_scope, ctx->block_id_);
  }

  int64_t max_memory_size = GetEagerDeletionThreshold();
  std::unique_ptr<GarbageCollector> gc;
  // skip while_op and while_grad_op temporarily
  if (max_memory_size >= 0 && !keep_kids) {
    ctx->ResetReferenceCount();
#ifdef PADDLE_WITH_CUDA
    if (platform::is_gpu_place(place_)) {
      if (IsFastEagerDeletionModeEnabled()) {
        gc.reset(new UnsafeFastGPUGarbageCollector(
            boost::get<platform::CUDAPlace>(place_), max_memory_size));
      } else {
        gc.reset(new DefaultStreamGarbageCollector(
            boost::get<platform::CUDAPlace>(place_), max_memory_size));
      }
    } else if (platform::is_cpu_place(place_)) {
#endif
      gc.reset(new CPUGarbageCollector(boost::get<platform::CPUPlace>(place_),
                                       max_memory_size));
#ifdef PADDLE_WITH_CUDA
    }
#endif
  }

  for (auto& op : ctx->ops_) {
    op->Run(*local_scope, place_);

    if (gc) {
      DeleteUnusedTensors(*local_scope, op.get(), gc.get(),
                          &(ctx->runtime_ref_cnts_));
    }
  }

  platform::DeviceContextPool::Instance().Get(place_)->Wait();

  if (local_scope != scope) {
    scope->DeleteScope(local_scope);
  } else {
    if (!keep_kids) {
      // By default, we should delete all kid scopes after run executor because
      // some operators may create local scope when running, such as while_op.
      // But when while_op also create a local executor to run it's sub block,
      // the sub scopes it created should not be dropped immediately, because
      // while_grad_op will use some variables created during while_op run, so
      // we need to keep the kids and wait for the outer executor to drop them.
      scope->DropKids();
    }
  }
}

void Executor::RunPreparedContext(
    ExecutorPrepareContext* ctx, Scope* scope,
    std::map<std::string, const LoDTensor*>* feed_targets,
    std::map<std::string, LoDTensor*>* fetch_targets, bool create_local_scope,
    bool create_vars, const std::string& feed_holder_name,
    const std::string& fetch_holder_name) {
  auto& global_block = ctx->prog_.Block(ctx->block_id_);

  PADDLE_ENFORCE(
      has_feed_operators(global_block, *feed_targets, feed_holder_name),
      "Program in ExecutorPrepareContext should have feed_ops.");
  PADDLE_ENFORCE(
      has_fetch_operators(global_block, *fetch_targets, fetch_holder_name),
      "Program in the prepared context should have fetch_ops.");

  // map the data of feed_targets to feed_holder
  for (auto* op : global_block.AllOps()) {
    if (op->Type() == kFeedOpType) {
      std::string feed_target_name = op->Output("Out")[0];
      int idx = boost::get<int>(op->GetAttr("col"));
      SetFeedVariable(scope, *(*feed_targets)[feed_target_name],
                      feed_holder_name, idx);
    }
  }

  RunPreparedContext(ctx, scope, create_local_scope, create_vars);

  // obtain the data of fetch_targets from fetch_holder
  for (auto* op : global_block.AllOps()) {
    if (op->Type() == kFetchOpType) {
      std::string fetch_target_name = op->Input("X")[0];
      int idx = boost::get<int>(op->GetAttr("col"));
      *(*fetch_targets)[fetch_target_name] =
          GetFetchVariable(*scope, fetch_holder_name, idx);
    }
  }
}

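// Turns on the use_mkldnn attribute of every op (in every block) that
// declares it, so that MKL-DNN kernels are selected where available.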
void Executor::EnableMKLDNN(const ProgramDesc& program) {
#ifdef PADDLE_WITH_MKLDNN
  VLOG(3) << "use_mkldnn=True";
  for (size_t bid = 0; bid < program.Size(); ++bid) {
    auto* block = const_cast<ProgramDesc&>(program).MutableBlock(bid);
    for (auto* op : block->AllOps()) {
      if (op->HasAttr("use_mkldnn")) {
        op->SetAttr("use_mkldnn", true);
      }
    }
  }
#else
  LOG(WARNING)
      << "'MKLDNN' is not supported; please re-compile with the WITH_MKLDNN "
         "option";
#endif
}
}  // namespace framework
}  // namespace paddle