/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/framework/executor.h"
#include <deque>
#include <memory>
#include <unordered_map>
#include <unordered_set>
#include <utility>

#include "paddle/fluid/framework/feed_fetch_method.h"
#include "paddle/fluid/framework/lod_rank_table.h"
#include "paddle/fluid/framework/lod_tensor_array.h"
#include "paddle/fluid/framework/op_registry.h"
#include "paddle/fluid/framework/reader.h"
#include "paddle/fluid/framework/transfer_scope_cache.h"
#include "paddle/fluid/framework/variable_helper.h"
#include "paddle/fluid/operators/distributed/distributed.h"
#include "paddle/fluid/platform/place.h"
#include "paddle/fluid/platform/profiler.h"

#ifdef PADDLE_WITH_NGRAPH
#include "paddle/fluid/operators/ngraph/ngraph_engine.h"
#endif

DECLARE_bool(benchmark);
DEFINE_bool(use_mkldnn, false, "Use MKLDNN to run");
DEFINE_bool(use_ngraph, false, "Use NGRAPH to run");

namespace paddle {
namespace framework {
namespace {
// Block ids start from 0. kProgramId is a pseudo block id used to represent
// the code block that wraps the whole program (i.e. block 0).
int kProgramId = -1;
}  // namespace

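// Count, for every non-persistable variable in `block` that is not listed in
// `skip_var_list`, how many times it appears as an operator input or output.
// Only LOD_TENSOR, SELECTED_ROWS and LOD_TENSOR_ARRAY variables are counted;
// the counts seed the reference counting used by eager deletion (GC).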
static std::unordered_map<std::string, size_t> GetNonPersistableReferenceCounts(
    const BlockDesc& block, const std::vector<std::string>& skip_var_list) {
  std::unordered_map<std::string, size_t> ref_cnts;
  std::unordered_set<std::string> skip_vars(skip_var_list.begin(),
                                            skip_var_list.end());

  auto update_ref_cnts = [&](OpDesc* op_desc, const VariableNameMap& name_map) {
    for (auto& name_pair : name_map) {
      for (auto& name : name_pair.second) {
        if (skip_vars.count(name)) continue;
        auto* var_desc = block.FindVar(name);
        if (var_desc == nullptr || var_desc->Persistable()) continue;
        auto type = var_desc->Proto()->type().type();
        if (type != proto::VarType::LOD_TENSOR &&
            type != proto::VarType::SELECTED_ROWS &&
            type != proto::VarType::LOD_TENSOR_ARRAY) {
          continue;
        }
        ++ref_cnts[name];
      }
    }
  };

  for (auto op_desc : block.AllOps()) {
    update_ref_cnts(op_desc, op_desc->Inputs());
    update_ref_cnts(op_desc, op_desc->Outputs());
  }
  return ref_cnts;
}

ExecutorPrepareContext::ExecutorPrepareContext(
    const framework::ProgramDesc& prog, size_t block_id,
    const std::vector<std::string>& keep_vars, bool force_disable_gc)
    : prog_(prog), block_id_(block_id), force_disable_gc_(force_disable_gc) {
  if (GetEagerDeletionThreshold() >= 0 && !force_disable_gc_) {
    global_ref_cnts_ =
        GetNonPersistableReferenceCounts(prog.Block(block_id), keep_vars);
  }
}

ExecutorPrepareContext::~ExecutorPrepareContext() {
  VLOG(5) << "destroy ExecutorPrepareContext";
}

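// Decrement the reference count of each input/output variable of `op`. When a
// count reaches zero, move the variable's memory holder(s) into `garbages` and
// hand them to the garbage collector so the memory can be reclaimed eagerly.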
static void DeleteUnusedTensors(
    const Scope& scope, const OperatorBase* op, GarbageCollector* gc,
    std::unordered_map<std::string, size_t>* ref_cnts) {
  std::deque<std::shared_ptr<memory::Allocation>> garbages;

  auto handler = [&](const VariableNameMap& name_map) {
    for (auto& name_pair : name_map) {
      for (auto& name : name_pair.second) {
        auto it = ref_cnts->find(name);
        if (it == ref_cnts->end()) continue;
        if (--(it->second) != 0) {
          continue;
        }
        auto* var = scope.FindVar(name);
        if (var == nullptr) {
          continue;
        }

        VLOG(2) << "Erase variable " << name;
        if (var->IsType<LoDTensor>()) {
          garbages.emplace_back(
              var->GetMutable<LoDTensor>()->MoveMemoryHolder());
        } else if (var->IsType<SelectedRows>()) {
          garbages.emplace_back(var->GetMutable<SelectedRows>()
                                    ->mutable_value()
                                    ->MoveMemoryHolder());
        } else if (var->IsType<LoDTensorArray>()) {
          auto* lod_tensor_arr = var->GetMutable<LoDTensorArray>();
          for (auto& t : *lod_tensor_arr) {
            garbages.emplace_back(t.MoveMemoryHolder());
          }
        } else {
          PADDLE_THROW(
              "Type %s of variable %s is not supported for eager deletion",
              framework::ToTypeName(var->Type()), name);
        }
      }
    }
  };

  handler(op->Inputs());
  handler(op->Outputs());

  if (!garbages.empty()) {
    gc->Add(std::move(garbages));
  }
}

Executor::Executor(const platform::Place& place) : place_(place) {}

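// In distributed builds, notify the remote side (e.g. parameter servers)
// through the RPC client that this executor has finished; otherwise a no-op.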
void Executor::Close() {
#ifdef PADDLE_WITH_DISTRIBUTE
  // TODO(typhoonzero): the complete message will need to use the real
  // trainer_id instead of 0.
  auto client =
      paddle::operators::distributed::RPCClient::GetInstance<RPCCLIENT_T>(0);
  client->SendComplete();
#endif
}

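// Create all variables declared in the given block. Persistable variables are
// created in the outermost ancestor scope so that they outlive a single run,
// while the remaining variables are created in `scope` itself.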
void Executor::CreateVariables(const ProgramDesc& pdesc, Scope* scope,
                               int block_id) {
  auto& global_block = pdesc.Block(block_id);

  const Scope* ancestor_scope = scope;
  while (ancestor_scope->parent()) {
    ancestor_scope = ancestor_scope->parent();
  }

  if (ancestor_scope != scope) {
    for (auto& var : global_block.AllVars()) {
      if (var->Name() == framework::kEmptyVarName) {
        continue;
      }

      if (var->Persistable()) {
        auto* ptr = const_cast<Scope*>(ancestor_scope)->Var(var->Name());
        InitializeVariable(ptr, var->GetType());
        VLOG(3) << "Create Variable " << var->Name()
                << " globally, whose pointer is " << ptr;
      } else {
        auto* ptr = scope->Var(var->Name());
        InitializeVariable(ptr, var->GetType());
        VLOG(3) << "Create Variable " << var->Name()
                << " locally, whose pointer is " << ptr;
      }
    }
  } else {
    for (auto& var : global_block.AllVars()) {
      auto* ptr = scope->Var(var->Name());
      InitializeVariable(ptr, var->GetType());
      VLOG(3) << "Create variable " << var->Name() << ", whose pointer is "
              << ptr;
    }
  }
}

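// Prepare and run one block of `pdesc` in `scope`. A minimal usage sketch
// (assuming an already-constructed ProgramDesc and Scope; the names below are
// illustrative, not part of this file):
//
//   Executor exe(platform::CPUPlace());
//   exe.Run(program, &scope, /*block_id=*/0);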
void Executor::Run(const ProgramDesc& pdesc, Scope* scope, int block_id,
                   bool create_local_scope, bool create_vars,
                   const std::vector<std::string>& skip_ref_cnt_vars,
                   bool force_disable_gc) {
  platform::RecordBlock b(block_id);
  if (FLAGS_use_mkldnn) EnableMKLDNN(pdesc);
#ifdef PADDLE_WITH_NGRAPH
  if (FLAGS_use_ngraph) operators::NgraphEngine::EnableNgraph(pdesc);
#endif
  auto ctx = Prepare(pdesc, block_id, skip_ref_cnt_vars, force_disable_gc);
  RunPreparedContext(ctx.get(), scope, create_local_scope, create_vars);
}

// Check whether the block already has feed operators and a feed_holder.
// Return false if the block does not have any feed operators.
// If some feed operators have been prepended to the block, check that
// the info contained in these feed operators matches the feed_targets
// and feed_holder_name. Raise an exception when any mismatch is found.
// Return true if the block has feed operators and a holder with matching info.
static bool has_feed_operators(
    const BlockDesc& block,
    const std::map<std::string, const LoDTensor*>& feed_targets,
    const std::string& feed_holder_name) {
  size_t feed_count = 0;
  for (auto* op : block.AllOps()) {
    if (op->Type() == kFeedOpType) {
      feed_count++;
      // The input variable name of the feed op should be feed_holder_name.
      PADDLE_ENFORCE_EQ(op->Input("X")[0], feed_holder_name,
                        "Input to feed op should be '%s'", feed_holder_name);
      std::string feed_target_name = op->Output("Out")[0];
      PADDLE_ENFORCE(
          feed_targets.find(feed_target_name) != feed_targets.end(),
          "Feed operator output name '%s' cannot be found in 'feed_targets'",
          feed_target_name);
    }
  }

  if (feed_count > 0) {
    PADDLE_ENFORCE_EQ(
        feed_count, feed_targets.size(),
        "The number of feed operators should match 'feed_targets'");

    if (!feed_holder_name.empty()) {
      // When feed operators are present, so should be the feed_holder.
      auto var = block.FindVar(feed_holder_name);
      PADDLE_ENFORCE_NOT_NULL(var, "Block should already have a '%s' variable",
                              feed_holder_name);
      PADDLE_ENFORCE_EQ(var->GetType(), proto::VarType::FEED_MINIBATCH,
                        "'%s' variable should be 'FEED_MINIBATCH' type",
                        feed_holder_name);
    }
  }

  return feed_count > 0;
}

// Check whether the block already has fetch operators and a fetch_holder.
// Return false if the block does not have any fetch operators.
// If some fetch operators have been appended to the block, check that
// the info contained in these fetch operators matches the fetch_targets
// and fetch_holder_name. Raise an exception when any mismatch is found.
// Return true if the block has fetch operators and a holder with matching
// info.
static bool has_fetch_operators(
    const BlockDesc& block,
    const std::map<std::string, LoDTensor*>& fetch_targets,
    const std::string& fetch_holder_name) {
  size_t fetch_count = 0;
  for (auto* op : block.AllOps()) {
    if (op->Type() == kFetchOpType) {
      fetch_count++;
      // The output variable name of the fetch op should be fetch_holder_name.
      PADDLE_ENFORCE_EQ(op->Output("Out")[0], fetch_holder_name,
                        "Output of fetch op should be '%s'", fetch_holder_name);
      std::string fetch_target_name = op->Input("X")[0];
      PADDLE_ENFORCE(
          fetch_targets.find(fetch_target_name) != fetch_targets.end(),
          "Fetch operator input name '%s' cannot be found in 'fetch_targets'",
          fetch_target_name);
    }
  }

  if (fetch_count > 0) {
    PADDLE_ENFORCE_EQ(
        fetch_count, fetch_targets.size(),
        "The number of fetch operators should match 'fetch_targets'");

    if (!fetch_holder_name.empty()) {
      // When fetch operators are present, so should be the fetch_holder.
      auto var = block.FindVar(fetch_holder_name);
      PADDLE_ENFORCE_NOT_NULL(var, "Block should already have a '%s' variable",
                              fetch_holder_name);
      PADDLE_ENFORCE_EQ(var->GetType(), proto::VarType::FETCH_LIST,
                        "'%s' variable should be 'FETCH_LIST' type",
                        fetch_holder_name);
    }
  }

  return fetch_count > 0;
}

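// Run block 0 of `program` with explicit feed/fetch targets. If the block does
// not already contain matching feed/fetch operators, a copy of the program is
// made and feed ops are prepended (and fetch ops appended) together with the
// corresponding holder variables before execution.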
void Executor::Run(const ProgramDesc& program, Scope* scope,
                   std::map<std::string, const LoDTensor*>* feed_targets,
                   std::map<std::string, LoDTensor*>* fetch_targets,
                   bool create_local_scope, bool create_vars,
                   const std::string& feed_holder_name,
                   const std::string& fetch_holder_name) {
  platform::RecordBlock b(kProgramId);
  if (FLAGS_use_mkldnn) EnableMKLDNN(program);
  bool has_feed_ops =
      has_feed_operators(program.Block(0), *feed_targets, feed_holder_name);
  bool has_fetch_ops =
      has_fetch_operators(program.Block(0), *fetch_targets, fetch_holder_name);

  ProgramDesc* copy_program = const_cast<ProgramDesc*>(&program);
  std::unique_ptr<ProgramDesc> unique_ptr_of_copy_program;
  if (!has_feed_ops || !has_fetch_ops) {
    unique_ptr_of_copy_program.reset(new ProgramDesc(program));
    copy_program = unique_ptr_of_copy_program.get();
  }
  auto* global_block = copy_program->MutableBlock(0);

  if (!has_feed_ops) {
    // create feed_holder variable
    auto* feed_holder = global_block->Var(feed_holder_name);
    feed_holder->SetType(proto::VarType::FEED_MINIBATCH);
    feed_holder->SetPersistable(true);

    int i = 0;
    for (auto& feed_target : (*feed_targets)) {
      std::string var_name = feed_target.first;
      VLOG(3) << "feed target's name: " << var_name;

      // prepend feed op
      auto* op = global_block->PrependOp();
      op->SetType(kFeedOpType);
      op->SetInput("X", {feed_holder_name});
      op->SetOutput("Out", {var_name});
      op->SetAttr("col", {static_cast<int>(i)});
      op->CheckAttrs();

      i++;
    }
  }

  if (!has_fetch_ops) {
    // create fetch_holder variable
    auto* fetch_holder = global_block->Var(fetch_holder_name);
    fetch_holder->SetType(proto::VarType::FETCH_LIST);
    fetch_holder->SetPersistable(true);

    int i = 0;
    for (auto& fetch_target : (*fetch_targets)) {
      std::string var_name = fetch_target.first;
      VLOG(3) << "fetch target's name: " << var_name;

      // append fetch op
      auto* op = global_block->AppendOp();
      op->SetType(kFetchOpType);
      op->SetInput("X", {var_name});
      op->SetOutput("Out", {fetch_holder_name});
      op->SetAttr("col", {static_cast<int>(i)});
      op->CheckAttrs();

      i++;
    }
  }

  auto ctx = Prepare(*copy_program, 0);
  RunPreparedContext(ctx.get(), scope, feed_targets, fetch_targets,
                     create_local_scope, create_vars, feed_holder_name,
                     fetch_holder_name);
}

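// Instantiate every operator of the given block once and cache them (together
// with the eager-deletion reference counts) in an ExecutorPrepareContext, so
// the block can be run repeatedly without re-creating its operators.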
std::unique_ptr<ExecutorPrepareContext> Executor::Prepare(
    const ProgramDesc& program, int block_id,
    const std::vector<std::string>& skip_ref_cnt_vars, bool force_disable_gc) {
  std::unique_ptr<ExecutorPrepareContext> ctx(new ExecutorPrepareContext(
      program, block_id, skip_ref_cnt_vars, force_disable_gc));
  PADDLE_ENFORCE_LT(static_cast<size_t>(block_id), program.Size());
  auto& block = program.Block(block_id);
  for (auto& op_desc : block.AllOps()) {
    ctx->ops_.push_back(OpRegistry::CreateOp(*op_desc));
  }
  return ctx;
}

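// Same as Prepare() above, but for several blocks at once. When non-empty,
// skip_ref_cnt_vars must provide one list of skip variables per block id.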
std::vector<std::shared_ptr<ExecutorPrepareContext>> Executor::Prepare(
    const ProgramDesc& program, const std::vector<int>& block_ids,
    const std::vector<std::vector<std::string>>& skip_ref_cnt_vars,
    bool force_disable_gc) {
  PADDLE_ENFORCE(
      skip_ref_cnt_vars.empty() || skip_ref_cnt_vars.size() == block_ids.size(),
      "skip_ref_cnt_vars should be either empty or equal to block number %d",
      block_ids.size());
  std::vector<std::shared_ptr<ExecutorPrepareContext>> result;
  size_t idx = 0;
  for (auto& bid : block_ids) {
    ExecutorPrepareContext* ctx;
    if (skip_ref_cnt_vars.empty()) {
      ctx = new ExecutorPrepareContext(program, bid, std::vector<std::string>(),
                                       force_disable_gc);
    } else {
      ctx = new ExecutorPrepareContext(program, bid, skip_ref_cnt_vars[idx],
                                       force_disable_gc);
    }
    PADDLE_ENFORCE_LT(static_cast<size_t>(bid), program.Size());
    auto& block = program.Block(bid);
    for (auto& op_desc : block.AllOps()) {
      ctx->ops_.push_back(OpRegistry::CreateOp(*op_desc));
    }
    result.push_back(std::shared_ptr<ExecutorPrepareContext>(ctx));
    ++idx;
  }
  return result;
}

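// Run a prepared block: optionally create a local scope and the block's
// variables, set up a garbage collector when eager deletion is enabled, run
// each operator in order (deleting unused tensors as reference counts drop to
// zero), wait for the device to finish, and finally clean up the scopes.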
void Executor::RunPreparedContext(ExecutorPrepareContext* ctx, Scope* scope,
                                  bool create_local_scope, bool create_vars,
                                  bool keep_kids) {
  PADDLE_ENFORCE_NOT_NULL(scope);
  Scope* local_scope = scope;
  if (create_vars) {
    if (create_local_scope) {
      local_scope = &scope->NewScope();
    }
    CreateVariables(ctx->prog_, local_scope, ctx->block_id_);
  }

  int64_t max_memory_size = GetEagerDeletionThreshold();
  std::unique_ptr<GarbageCollector> gc;
  // FIXME(zjl): recurrent_op is rather complex, so we forcibly disable gc in
  // recurrent_op.
  if (!ctx->force_disable_gc_ && max_memory_size >= 0 && !keep_kids) {
    ctx->ResetReferenceCount();
#ifdef PADDLE_WITH_CUDA
    if (platform::is_gpu_place(place_)) {
      if (IsFastEagerDeletionModeEnabled()) {
        gc.reset(new UnsafeFastGPUGarbageCollector(
            boost::get<platform::CUDAPlace>(place_), max_memory_size));
      } else {
        gc.reset(new DefaultStreamGarbageCollector(
            boost::get<platform::CUDAPlace>(place_), max_memory_size));
      }
    } else if (platform::is_cpu_place(place_)) {
#endif
      gc.reset(new CPUGarbageCollector(boost::get<platform::CPUPlace>(place_),
                                       max_memory_size));
#ifdef PADDLE_WITH_CUDA
    }
#endif
  }

  for (auto& op : ctx->ops_) {
    op->Run(*local_scope, place_);

    if (gc) {
      DeleteUnusedTensors(*local_scope, op.get(), gc.get(),
                          &(ctx->runtime_ref_cnts_));
    }
  }

  platform::DeviceContextPool::Instance().Get(place_)->Wait();

  if (local_scope != scope) {
    scope->DeleteScope(local_scope);
  } else {
    if (!keep_kids) {
      // By default, we should delete all kid scopes after the executor runs,
      // because some operators may create local scopes while running, such as
      // while_op. But when while_op also creates a local executor to run its
      // sub-block, the sub scopes it creates should not be dropped
      // immediately, because while_grad_op will use some variables created
      // during the while_op run, so we need to keep the kids and wait for the
      // outer executor to drop them.
      scope->DropKids();
    }
  }
}

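// Run a prepared block that already contains feed/fetch operators: copy the
// feed tensors into the feed holder, execute the block, then read the results
// from the fetch holder back into `fetch_targets`.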
void Executor::RunPreparedContext(
    ExecutorPrepareContext* ctx, Scope* scope,
    std::map<std::string, const LoDTensor*>* feed_targets,
    std::map<std::string, LoDTensor*>* fetch_targets, bool create_local_scope,
    bool create_vars, const std::string& feed_holder_name,
    const std::string& fetch_holder_name) {
  auto& global_block = ctx->prog_.Block(ctx->block_id_);

  PADDLE_ENFORCE(
      has_feed_operators(global_block, *feed_targets, feed_holder_name),
      "Program in ExecutorPrepareContext should have feed_ops.");
  PADDLE_ENFORCE(
      has_fetch_operators(global_block, *fetch_targets, fetch_holder_name),
      "Program in the prepared context should have fetch_ops.");

  // map the data of feed_targets to feed_holder
  for (auto* op : global_block.AllOps()) {
    if (op->Type() == kFeedOpType) {
      std::string feed_target_name = op->Output("Out")[0];
      int idx = boost::get<int>(op->GetAttr("col"));
      SetFeedVariable(scope, *(*feed_targets)[feed_target_name],
                      feed_holder_name, idx);
    }
  }

  RunPreparedContext(ctx, scope, create_local_scope, create_vars);

  // obtain the data of fetch_targets from fetch_holder
  for (auto* op : global_block.AllOps()) {
    if (op->Type() == kFetchOpType) {
      std::string fetch_target_name = op->Input("X")[0];
      int idx = boost::get<int>(op->GetAttr("col"));
500
      *(*fetch_targets)[fetch_target_name] =
          GetFetchVariable(*scope, fetch_holder_name, idx);
    }
  }
}

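// When built with MKLDNN support, turn on the `use_mkldnn` attribute of every
// operator (in every block) that exposes it; otherwise only log a warning.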
void Executor::EnableMKLDNN(const ProgramDesc& program) {
#ifdef PADDLE_WITH_MKLDNN
  VLOG(3) << "use_mkldnn=True";
  for (size_t bid = 0; bid < program.Size(); ++bid) {
    auto* block = const_cast<ProgramDesc&>(program).MutableBlock(bid);
    for (auto* op : block->AllOps()) {
      if (op->HasAttr("use_mkldnn")) {
        op->SetAttr("use_mkldnn", true);
      }
    }
  }
#else
  LOG(WARNING)
      << "'MKLDNN' is not supported; please re-compile with WITH_MKLDNN option";
#endif
}
}  // namespace framework
}  // namespace paddle