/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/framework/executor.h"
#include <deque>
#include <memory>
#include <unordered_map>
#include <unordered_set>
#include <utility>
#include "google/protobuf/io/zero_copy_stream_impl.h"
#include "google/protobuf/message.h"
#include "google/protobuf/text_format.h"
#include "paddle/fluid/framework/feed_fetch_method.h"
#include "paddle/fluid/framework/lod_rank_table.h"
#include "paddle/fluid/framework/lod_tensor_array.h"
#include "paddle/fluid/framework/op_registry.h"
#include "paddle/fluid/framework/reader.h"
#include "paddle/fluid/framework/trainer_desc.pb.h"
#include "paddle/fluid/framework/trainer_factory.h"
#include "paddle/fluid/framework/transfer_scope_cache.h"
#include "paddle/fluid/framework/variable_helper.h"
#include "paddle/fluid/operators/controlflow/conditional_block_op_helper.h"
#include "paddle/fluid/operators/controlflow/recurrent_op_helper.h"
#include "paddle/fluid/operators/controlflow/while_op_helper.h"
#include "paddle/fluid/operators/distributed/distributed.h"
#include "paddle/fluid/platform/place.h"
#include "paddle/fluid/platform/profiler.h"

#ifdef PADDLE_WITH_NGRAPH
#include "paddle/fluid/operators/ngraph/ngraph_engine.h"
#endif

DECLARE_bool(benchmark);
DEFINE_bool(use_mkldnn, false, "Use MKLDNN to run");
DEFINE_bool(use_ngraph, false, "Use NGRAPH to run");

namespace paddle {
namespace framework {
namespace {
// Block ids start from 0; kProgramId (-1) is a pseudo id that represents the
// code block wrapping the whole program, i.e. block 0.
int kProgramId = -1;
}  // namespace

ExecutorPrepareContext::ExecutorPrepareContext(
    const framework::ProgramDesc& prog, size_t block_id)
    : prog_(prog), block_id_(block_id) {}

void ExecutorPrepareContext::PrepareUnusedVars(
    const std::vector<std::string>& keep_vars, bool force_disable_gc) {
#ifdef PADDLE_WITH_NGRAPH
  if (FLAGS_use_ngraph) {
    // FIXME(zjl): There is a difference when ngraph and gc are both enabled
    // in unittests. I do not know why it happens. Maybe the ngraph engine
    // caches some variables?
    LOG_FIRST_N(WARNING, 1)
        << "FLAGS_use_ngraph=True, garbage collection strategy is "
           "disabled in Executor";
    force_disable_gc = true;
  }
#endif
  // When the program has sub-blocks, prepare safe eager deletion for the
  // control-flow ops (conditional_block, while, recurrent) and their grad ops.
  if (prog_.Size() > 1) {
    operators::PrepareSafeEagerDeletionOnConditionalOpAndConditionalGradOp(
        prog_, block_id_, ops_);
    operators::PrepareSafeEagerDeletionOnWhileOpAndWhileGradOp(prog_, block_id_,
                                                               ops_);
    operators::PrepareSafeEagerDeletionOnRecurrentOpAndRecurrentGradOp(
        prog_, block_id_, ops_);
  }

  force_disable_gc_ = force_disable_gc;
  if (GetEagerDeletionThreshold() < 0 || force_disable_gc_) {
    return;
  }

  unused_vars_ = GetUnusedVars(prog_.Block(block_id_), ops_, keep_vars);
}

ExecutorPrepareContext::~ExecutorPrepareContext() {
  VLOG(5) << "destroy ExecutorPrepareContext";
}

Executor::Executor(const platform::Place& place) : place_(place) {}

Executor::~Executor() {
#ifdef PADDLE_WITH_MKLDNN
  // Clear the MKL-DNN cache, unless explicitly (as set in the constructor)
  // marked not to do so; this is needed for the MKL-DNN unit tests to work.
  if (platform::is_cpu_place(place_)) {
    platform::DeviceContextPool& pool = platform::DeviceContextPool::Instance();
    auto* dev_ctx =
        static_cast<platform::MKLDNNDeviceContext*>(pool.Get(place_));
    dev_ctx->ResetBlobMap();
    platform::set_cur_paddle_data_layout(paddle::framework::DataLayout::kNCHW);
  }
#endif
}

void Executor::Close() {
#ifdef PADDLE_WITH_DISTRIBUTE
  // TODO(typhoonzero): the complete message will need to use the real
  // trainer_id instead of 0.
  auto client =
      paddle::operators::distributed::RPCClient::GetInstance<RPCCLIENT_T>(0);
  client->SendComplete();
#endif
}

void Executor::CreateVariables(const ProgramDesc& pdesc, Scope* scope,
                               int block_id) {
  VLOG(3) << "Creating Variables for block " << block_id;
  auto& global_block = pdesc.Block(block_id);
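  // Persistable variables are created in the outermost ancestor scope so
  // that they outlive a single run; all other variables are created in the
  // scope passed in by the caller.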
  const Scope* ancestor_scope = scope;
  while (ancestor_scope->parent()) {
    ancestor_scope = ancestor_scope->parent();
  }
  if (ancestor_scope != scope) {
    for (auto& var : global_block.AllVars()) {
      if (var->Name() == framework::kEmptyVarName) {
        continue;
      }

      if (var->Persistable()) {
        auto* ptr = const_cast<Scope*>(ancestor_scope)->Var(var->Name());
        InitializeVariable(ptr, var->GetType());
        VLOG(3) << "Create Variable " << var->Name()
                << " global, which pointer is " << ptr;
      } else {
        auto* ptr = scope->Var(var->Name());
        InitializeVariable(ptr, var->GetType());
        VLOG(3) << "Create Variable " << var->Name()
                << " locally, which pointer is " << ptr;
      }
    }
  } else {
    for (auto& var : global_block.AllVars()) {
      auto* ptr = scope->Var(var->Name());
      InitializeVariable(ptr, var->GetType());
      VLOG(3) << "Create variable " << var->Name() << ", which pointer is "
              << ptr;
    }
  }
}

std::shared_ptr<TrainerBase> Executor::InitForDataset(
    const ProgramDesc& main_program, const std::string& trainer_desc_str,
    Scope* scope, Dataset* dataset) {
  VLOG(3) << "Start to RunFromDataset in executor";
  TrainerDesc trainer_desc;
  bool success = trainer_desc.ParseFromString(trainer_desc_str);
  PADDLE_ENFORCE_EQ(success, true,
                    "Failed to parse TrainerDesc from string:\n%s",
                    trainer_desc_str.c_str());
  VLOG(3) << "Going to create trainer, trainer class is "
          << trainer_desc.class_name();
  std::shared_ptr<TrainerBase> trainer;
  trainer = TrainerFactory::CreateTrainer(trainer_desc.class_name());
  // initialize trainer
  VLOG(3) << "Going to initialize trainer";
  trainer->Initialize(trainer_desc, dataset);
  VLOG(3) << "Set root scope here";
  trainer->SetScope(scope);
  // prepare training environment and helper environment
  VLOG(3) << "Try to init train environment";
  trainer->InitTrainerEnv(main_program, place_);
  VLOG(3) << "Try to init other environment";
  trainer->InitOtherEnv(main_program);
  return trainer;
}

void Executor::RunFromDataset(std::shared_ptr<TrainerBase> trainer) {
  PADDLE_ENFORCE_NE(trainer, nullptr,
                    "Trainer is nullptr, invoke InitForDataset first");
  // training and finalize training
  VLOG(3) << "Trainer starts to run";
  trainer->Run();
}

void Executor::ReleaseTrainer(std::shared_ptr<TrainerBase> trainer) {
  VLOG(3) << "Trainer going to finalize";
  trainer->Finalize();
}
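
// A minimal sketch, assuming `exe`, `program`, `scope`, `dataset` and
// `trainer_desc_str` are prepared by the caller, of the call sequence the
// three methods above form:
//
//   auto trainer = exe.InitForDataset(program, trainer_desc_str, scope,
//                                     dataset);
//   exe.RunFromDataset(trainer);
//   exe.ReleaseTrainer(trainer);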

void Executor::Run(const ProgramDesc& pdesc, Scope* scope, int block_id,
                   bool create_local_scope, bool create_vars,
                   const std::vector<std::string>& skip_ref_cnt_vars,
                   bool force_disable_gc, bool keep_kid_scopes) {
  platform::RecordBlock b(block_id);
  if (FLAGS_use_mkldnn) EnableMKLDNN(pdesc);
  auto ctx = Prepare(pdesc, block_id, skip_ref_cnt_vars, force_disable_gc);
  RunPreparedContext(ctx.get(), scope, create_local_scope, create_vars,
                     keep_kid_scopes);
}
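
// A minimal usage sketch for the overload above, assuming `program` is a
// caller-built ProgramDesc and the remaining arguments keep their defaults
// from the header:
//
//   Executor exe(platform::CPUPlace());
//   Scope scope;
//   exe.Run(program, &scope, /*block_id=*/0);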

// Check whether the block already has feed operators and feed_holder.
// Return false if the block does not have any feed operators.
// If some feed operators have been prepended to the block, check that
// the info contained in these feed operators matches the feed_targets
// and feed_holder_name. Raise exception when any mismatch is found.
// Return true if the block has feed operators and holder of matching info.
static bool has_feed_operators(
    const BlockDesc& block,
    const std::map<std::string, const LoDTensor*>& feed_targets,
    const std::string& feed_holder_name) {
  size_t feed_count = 0;
  for (auto* op : block.AllOps()) {
    if (op->Type() == kFeedOpType) {
      feed_count++;
      // The input variable of the feed op should be named feed_holder_name.
      PADDLE_ENFORCE_EQ(op->Input("X")[0], feed_holder_name,
                        "Input to feed op should be '%s'", feed_holder_name);
      std::string feed_target_name = op->Output("Out")[0];
      PADDLE_ENFORCE(
          feed_targets.find(feed_target_name) != feed_targets.end(),
          "Feed operator output name '%s' cannot be found in 'feed_targets'",
          feed_target_name);
    }
  }

  if (feed_count > 0) {
    PADDLE_ENFORCE_EQ(
        feed_count, feed_targets.size(),
        "The number of feed operators should match 'feed_targets'");

    if (!feed_holder_name.empty()) {
      // When feed operators are present, a feed_holder variable should be too.
      auto var = block.FindVar(feed_holder_name);
      PADDLE_ENFORCE_NOT_NULL(var, "Block should already have a '%s' variable",
                              feed_holder_name);
      PADDLE_ENFORCE_EQ(var->GetType(), proto::VarType::FEED_MINIBATCH,
                        "'%s' variable should be 'FEED_MINIBATCH' type",
                        feed_holder_name);
    }
  }

  return feed_count > 0;
}

// Check whether the block already has fetch operators and fetch_holder.
// Return false if the block does not have any fetch operators.
// If some fetch operators have been appended to the block, check that
// the info contained in these fetch operators matches the fetch_targets
// and fetch_holder_name. Raise exception when any mismatch is found.
// Return true if the block has fetch operators and holder of matching info.
static bool has_fetch_operators(
    const BlockDesc& block,
    const std::map<std::string, LoDTensor*>& fetch_targets,
    const std::string& fetch_holder_name) {
  size_t fetch_count = 0;
  for (auto* op : block.AllOps()) {
    if (op->Type() == kFetchOpType) {
      fetch_count++;
      // The output variable of the fetch op should be named fetch_holder_name.
      PADDLE_ENFORCE_EQ(op->Output("Out")[0], fetch_holder_name,
                        "Output of fetch op should be '%s'", fetch_holder_name);
      std::string fetch_target_name = op->Input("X")[0];
      PADDLE_ENFORCE(
          fetch_targets.find(fetch_target_name) != fetch_targets.end(),
          "Fetch operator input name '%s' cannot be found in 'fetch_targets'",
          fetch_target_name);
    }
  }

  if (fetch_count > 0) {
    PADDLE_ENFORCE_EQ(
        fetch_count, fetch_targets.size(),
        "The number of fetch operators should match 'fetch_targets'");

    if (!fetch_holder_name.empty()) {
      // When fetch operators are present, a fetch_holder variable should be
      // too.
      auto var = block.FindVar(fetch_holder_name);
      PADDLE_ENFORCE_NOT_NULL(var, "Block should already have a '%s' variable",
                              fetch_holder_name);
      PADDLE_ENFORCE_EQ(var->GetType(), proto::VarType::FETCH_LIST,
                        "'%s' variable should be 'FETCH_LIST' type",
                        fetch_holder_name);
    }
  }

  return fetch_count > 0;
}

void Executor::Run(const ProgramDesc& program, Scope* scope,
                   std::map<std::string, const LoDTensor*>* feed_targets,
                   std::map<std::string, LoDTensor*>* fetch_targets,
                   bool create_local_scope, bool create_vars,
                   const std::string& feed_holder_name,
                   const std::string& fetch_holder_name) {
  platform::RecordBlock b(kProgramId);
  if (FLAGS_use_mkldnn) EnableMKLDNN(program);
  bool has_feed_ops =
      has_feed_operators(program.Block(0), *feed_targets, feed_holder_name);
  bool has_fetch_ops =
      has_fetch_operators(program.Block(0), *fetch_targets, fetch_holder_name);

  ProgramDesc* copy_program = const_cast<ProgramDesc*>(&program);
  std::unique_ptr<ProgramDesc> unique_ptr_of_copy_program;
  if (!has_feed_ops || !has_fetch_ops) {
    unique_ptr_of_copy_program.reset(new ProgramDesc(program));
    copy_program = unique_ptr_of_copy_program.get();
  }
  auto* global_block = copy_program->MutableBlock(0);

  if (!has_feed_ops) {
    // create feed_holder variable
    auto* feed_holder = global_block->Var(feed_holder_name);
    feed_holder->SetType(proto::VarType::FEED_MINIBATCH);
    feed_holder->SetPersistable(true);

    int i = 0;
    for (auto& feed_target : (*feed_targets)) {
      std::string var_name = feed_target.first;
      VLOG(3) << "feed target's name: " << var_name;

      // prepend feed op
      auto* op = global_block->PrependOp();
      op->SetType(kFeedOpType);
      op->SetInput("X", {feed_holder_name});
      op->SetOutput("Out", {var_name});
      op->SetAttr("col", {static_cast<int>(i)});
      op->CheckAttrs();

      i++;
    }
  }

  if (!has_fetch_ops) {
    // create fetch_holder variable
    auto* fetch_holder = global_block->Var(fetch_holder_name);
    fetch_holder->SetType(proto::VarType::FETCH_LIST);
    fetch_holder->SetPersistable(true);

    int i = 0;
    for (auto& fetch_target : (*fetch_targets)) {
      std::string var_name = fetch_target.first;
      VLOG(3) << "fetch target's name: " << var_name;

      // append fetch op
      auto* op = global_block->AppendOp();
      op->SetType(kFetchOpType);
      op->SetInput("X", {var_name});
      op->SetOutput("Out", {fetch_holder_name});
      op->SetAttr("col", {static_cast<int>(i)});
      op->CheckAttrs();

      i++;
    }
  }

  auto ctx = Prepare(*copy_program, 0);
  RunPreparedContext(ctx.get(), scope, feed_targets, fetch_targets,
                     create_local_scope, create_vars, feed_holder_name,
                     fetch_holder_name);
}
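
// A sketch of the feed/fetch overload above, assuming the LoDTensor objects
// are prepared by the caller and the holder names keep their defaults from
// the header:
//
//   std::map<std::string, const LoDTensor*> feeds{{"x", &x_tensor}};
//   std::map<std::string, LoDTensor*> fetches{{"out", &out_tensor}};
//   exe.Run(program, &scope, &feeds, &fetches);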

std::unique_ptr<ExecutorPrepareContext> Executor::Prepare(
    const ProgramDesc& program, int block_id,
    const std::vector<std::string>& skip_ref_cnt_vars, bool force_disable_gc) {
  std::unique_ptr<ExecutorPrepareContext> ctx(
      new ExecutorPrepareContext(program, block_id));
  PADDLE_ENFORCE_LT(static_cast<size_t>(block_id), program.Size());
  auto& block = program.Block(block_id);
  for (auto& op_desc : block.AllOps()) {
    ctx->ops_.push_back(OpRegistry::CreateOp(*op_desc));
  }
#ifdef PADDLE_WITH_NGRAPH
  if (FLAGS_use_ngraph && ctx->block_id_ == 0) {
    paddle::operators::NgraphEngine::FuseNgraphOps(
        ctx->prog_.Block(ctx->block_id_), &ctx->ops_);
  }
#endif
  ctx->PrepareUnusedVars(skip_ref_cnt_vars, force_disable_gc);
  return ctx;
}
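
// Prepare/RunPreparedContext split op creation from execution, so a block
// can be prepared once and run many times. A minimal sketch, assuming `exe`,
// `program` and `scope` come from the caller:
//
//   auto ctx = exe.Prepare(program, /*block_id=*/0);
//   for (int step = 0; step < num_steps; ++step) {
//     exe.RunPreparedContext(ctx.get(), &scope);
//   }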

std::vector<std::shared_ptr<ExecutorPrepareContext>> Executor::Prepare(
    const ProgramDesc& program, const std::vector<int>& block_ids,
    const std::vector<std::vector<std::string>>& skip_ref_cnt_vars,
    bool force_disable_gc) {
  PADDLE_ENFORCE(
      skip_ref_cnt_vars.empty() || skip_ref_cnt_vars.size() == block_ids.size(),
      "skip_ref_cnt_vars should be either empty or equals to block number %d",
      block_ids.size());
  std::vector<std::shared_ptr<ExecutorPrepareContext>> result;
  size_t idx = 0;
  for (auto& bid : block_ids) {
    PADDLE_ENFORCE_LT(static_cast<size_t>(bid), program.Size());
    auto* ctx = new ExecutorPrepareContext(program, bid);
    auto& block = program.Block(bid);
    for (auto& op_desc : block.AllOps()) {
      ctx->ops_.push_back(OpRegistry::CreateOp(*op_desc));
    }
    if (skip_ref_cnt_vars.empty()) {
      ctx->PrepareUnusedVars(std::vector<std::string>(), force_disable_gc);
    } else {
      ctx->PrepareUnusedVars(skip_ref_cnt_vars[idx], force_disable_gc);
    }
    result.push_back(std::shared_ptr<ExecutorPrepareContext>(ctx));
    ++idx;
  }
  return result;
}

void Executor::RunPartialPreparedContext(ExecutorPrepareContext* ctx,
                                         Scope* scope, int64_t start_op_index,
                                         int64_t end_op_index,
                                         bool create_local_scope,
                                         bool create_vars, bool keep_kids) {
  platform::RecordBlock b(kProgramId);
  PADDLE_ENFORCE_NOT_NULL(scope);
  Scope* local_scope = scope;
  if (create_vars) {
    if (create_local_scope) {
      local_scope = &scope->NewScope();
    }
    CreateVariables(ctx->prog_, local_scope, ctx->block_id_);
  }

  int64_t max_memory_size = GetEagerDeletionThreshold();
  std::unique_ptr<GarbageCollector> gc;
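  // Choose a collector matching the place: on CUDA devices the fast mode
  // releases memory without stream synchronization (hence "unsafe"), the
  // default mode releases it on the computation stream, and CPU places use a
  // plain collector.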
  if (!ctx->force_disable_gc_ && max_memory_size >= 0) {
#ifdef PADDLE_WITH_CUDA
    if (platform::is_gpu_place(place_)) {
      if (IsFastEagerDeletionModeEnabled()) {
        gc.reset(new UnsafeFastGPUGarbageCollector(
            boost::get<platform::CUDAPlace>(place_), max_memory_size));
      } else {
        gc.reset(new DefaultStreamGarbageCollector(
            boost::get<platform::CUDAPlace>(place_), max_memory_size));
      }
    } else if (platform::is_cpu_place(place_)) {
#endif
      gc.reset(new CPUGarbageCollector(boost::get<platform::CPUPlace>(place_),
                                       max_memory_size));
#ifdef PADDLE_WITH_CUDA
    }
#endif
  }

  for (int64_t i = start_op_index; i < end_op_index; ++i) {
    auto& op = ctx->ops_[i];
    op->Run(*local_scope, place_);
    if (gc) {
      DeleteUnusedTensors(*local_scope, op.get(), ctx->unused_vars_, gc.get());
    }
  }

  platform::DeviceContextPool::Instance().Get(place_)->Wait();

  if (local_scope != scope) {
    scope->DeleteScope(local_scope);
  } else {
    if (!keep_kids) {
      // By default, we delete all kid scopes after the executor runs, because
      // some operators (such as while_op) may create local scopes when
      // running. But when while_op creates a local executor to run its
      // sub-block, the sub scopes it created should not be dropped
      // immediately, because while_grad_op will use variables created during
      // the while_op run; so we keep the kids and wait for the outer
      // executor to drop them.

      scope->DropKids();
    }
  }
}

void Executor::RunPreparedContext(ExecutorPrepareContext* ctx, Scope* scope,
                                  bool create_local_scope, bool create_vars,
                                  bool keep_kids) {
  int64_t start_op_index = 0;
  int64_t end_op_index = ctx->ops_.size();
  RunPartialPreparedContext(ctx, scope, start_op_index, end_op_index,
                            create_local_scope, create_vars, keep_kids);
}

void Executor::RunPreparedContext(
    ExecutorPrepareContext* ctx, Scope* scope,
    std::map<std::string, const LoDTensor*>* feed_targets,
    std::map<std::string, LoDTensor*>* fetch_targets, bool create_local_scope,
    bool create_vars, const std::string& feed_holder_name,
    const std::string& fetch_holder_name) {
  auto& global_block = ctx->prog_.Block(ctx->block_id_);

  PADDLE_ENFORCE(
      has_feed_operators(global_block, *feed_targets, feed_holder_name),
      "Program in ExecutorPrepareContext should have feed ops.");
  PADDLE_ENFORCE(
      has_fetch_operators(global_block, *fetch_targets, fetch_holder_name),
      "Program in the prepared context should have fetch ops.");

  // map the data of feed_targets to feed_holder
  for (auto* op : global_block.AllOps()) {
    if (op->Type() == kFeedOpType) {
      std::string feed_target_name = op->Output("Out")[0];
      int idx = boost::get<int>(op->GetAttr("col"));
      SetFeedVariable(scope, *(*feed_targets)[feed_target_name],
                      feed_holder_name, idx);
    }
  }

  RunPreparedContext(ctx, scope, create_local_scope, create_vars);

  // obtain the data of fetch_targets from fetch_holder
  for (auto* op : global_block.AllOps()) {
    if (op->Type() == kFetchOpType) {
      std::string fetch_target_name = op->Input("X")[0];
      int idx = boost::get<int>(op->GetAttr("col"));
      *(*fetch_targets)[fetch_target_name] =
          GetFetchVariable(*scope, fetch_holder_name, idx);
    }
  }
}

void Executor::EnableMKLDNN(const ProgramDesc& program) {
#ifdef PADDLE_WITH_MKLDNN
  VLOG(3) << "use_mkldnn=True";
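  // Flip the use_mkldnn attribute on every op that declares it, in every
  // block of the program.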
  for (size_t bid = 0; bid < program.Size(); ++bid) {
    auto* block = const_cast<ProgramDesc&>(program).MutableBlock(bid);
    for (auto* op : block->AllOps()) {
      if (op->HasAttr("use_mkldnn")) {
        op->SetAttr("use_mkldnn", true);
      }
    }
  }
#else
  LOG(WARNING)
      << "'MKLDNN' is not supported; please re-compile with WITH_MKLDNN option";
#endif
}
}  // namespace framework
}  // namespace paddle