/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/framework/executor.h"
#include <memory>
#include "paddle/fluid/framework/feed_fetch_method.h"
#include "paddle/fluid/framework/trainer_desc.pb.h"
#include "paddle/fluid/framework/trainer_factory.h"
#include "paddle/fluid/operators/controlflow/conditional_block_op_helper.h"
#include "paddle/fluid/operators/controlflow/recurrent_op_helper.h"
#include "paddle/fluid/operators/controlflow/while_op_helper.h"
#include "paddle/fluid/platform/place.h"
#include "paddle/fluid/platform/profiler.h"
#include "paddle/fluid/platform/profiler/event_tracing.h"
#ifdef PADDLE_WITH_MKLDNN
#include "paddle/fluid/platform/mkldnn_helper.h"
#endif
#include "paddle/fluid/framework/executor_gc_helper.h"

DECLARE_bool(benchmark);
DECLARE_bool(use_mkldnn);

namespace paddle {
namespace framework {
namespace {
// Block ids start from 0. This sentinel id denotes the pseudo code block
// that wraps the outermost block (block 0).
int kProgramId = -1;
}  // namespace

ExecutorPrepareContext::ExecutorPrepareContext(
    const framework::ProgramDesc& prog, size_t block_id)
    : prog_(prog), block_id_(block_id) {}

void ExecutorPrepareContext::PrepareUnusedVars(
    const std::vector<std::string>& keep_vars, bool force_disable_gc) {
  // If the program has sub-blocks, prepare the control-flow ops
  // (conditional_block, while, recurrent) for safe eager deletion.
  if (prog_.Size() > 1) {
    operators::PrepareSafeEagerDeletionOnConditionalOpAndConditionalGradOp(
        prog_, block_id_, ops_);
    operators::PrepareSafeEagerDeletionOnWhileOpAndWhileGradOp(prog_, block_id_,
                                                               ops_);
    operators::PrepareSafeEagerDeletionOnRecurrentOpAndRecurrentGradOp(
        prog_, block_id_, ops_);
  }

  force_disable_gc_ = force_disable_gc;
  if (GetEagerDeletionThreshold() < 0 || force_disable_gc_) {
    return;
  }

  unused_vars_ = GetUnusedVars(prog_.Block(block_id_), ops_, keep_vars);
}

ExecutorPrepareContext::~ExecutorPrepareContext() {
  VLOG(5) << "destroy ExecutorPrepareContext";
}

Executor::Executor(const platform::Place& place) : place_(place) {}

Executor::~Executor() {
#ifdef PADDLE_WITH_MKLDNN
  // Clear the mkl-dnn cache; this is needed for the mkl-dnn unit tests
  // to work.
  platform::ClearMKLDNNCache(place_, this);
#endif
}

void Executor::Close() {
  // #ifdef PADDLE_WITH_DISTRIBUTE
  //   // TODO(typhoonzero): complete message will need to use real trainer_id,
  //   // except 0.
  //   auto client =
  //       paddle::operators::distributed::RPCClient::GetInstance<RPCCLIENT_T>(0);
  //   client->SendComplete();
  // #endif
}

void Executor::CreateVariables(const ProgramDesc& pdesc, Scope* scope,
                               int block_id) {
  VLOG(3) << "Creating Variables for block " << block_id;
  auto& global_block = pdesc.Block(block_id);
  const Scope* ancestor_scope = scope;
  while (ancestor_scope->parent()) {
    ancestor_scope = ancestor_scope->parent();
  }
  if (ancestor_scope != scope) {
    for (auto& var : global_block.AllVars()) {
      if (var->Name() == framework::kEmptyVarName) {
        continue;
      }

      if (var->Persistable()) {
        auto* ptr = const_cast<Scope*>(ancestor_scope)->Var(var->Name());

        VLOG(3) << "Initialize Variable " << var->Name();
        InitializeVariable(ptr, var->GetType());
        VLOG(3) << "Create Variable " << var->Name()
                << " global, which pointer is " << ptr << " type is "
                << static_cast<int>(var->GetType());
      } else {
        auto* ptr = scope->Var(var->Name());
        InitializeVariable(ptr, var->GetType());
        VLOG(3) << "Create Variable " << var->Name()
        VLOG(3) << "Create Variable " << var->Name()
                << " locally, which pointer is " << ptr << ", Variable Type "
                << static_cast<int>(var->GetType());
      }
    }
  } else {
    for (auto& var : global_block.AllVars()) {
      auto* ptr = scope->Var(var->Name());
      InitializeVariable(ptr, var->GetType());
      VLOG(3) << "Create variable " << var->Name() << ", which pointer is "
              << ptr;
    }
  }
}

std::shared_ptr<TrainerBase> Executor::InitForDataset(
    const ProgramDesc& main_program, const std::string& trainer_desc_str,
    Scope* scope, Dataset* dataset) {
  VLOG(3) << "Start to InitForDataset in executor";
  TrainerDesc trainer_desc;
  bool success = trainer_desc.ParseFromString(trainer_desc_str);
  PADDLE_ENFORCE_EQ(success, true,
                    platform::errors::PreconditionNotMet(
                        "Fail to parse TrainerDesc from string:\n%s",
                        trainer_desc_str.c_str()));
  VLOG(3) << "Going to create trainer, trainer class is "
          << trainer_desc.class_name();
  std::shared_ptr<TrainerBase> trainer;
  trainer = TrainerFactory::CreateTrainer(trainer_desc.class_name());
  // initialize trainer
  VLOG(3) << "Going to initialize trainer";
  trainer->Initialize(trainer_desc, dataset);
  VLOG(3) << "Set root scope here";
  trainer->SetScope(scope);
  // prepare training environment and helper environment
  VLOG(3) << "Try to init train environment";
  trainer->InitTrainerEnv(main_program, place_);
  VLOG(3) << "Try to init other environment";
  trainer->InitOtherEnv(main_program);
  return trainer;
}

void Executor::RunFromDataset(std::shared_ptr<TrainerBase> trainer) {
  PADDLE_ENFORCE_NOT_NULL(
      trainer, platform::errors::InvalidArgument(
                   "Trainer is nullptr, invoke InitForDataset first"));
  // run training; finalization happens in ReleaseTrainer
  VLOG(3) << "Trainer starts to run";
  trainer->Run();
}

void Executor::ReleaseTrainer(std::shared_ptr<TrainerBase> trainer) {
  VLOG(3) << "Trainer going to finalize";
  trainer->Finalize();
}
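
// A minimal sketch of the dataset-training flow above, for illustration
// only; `place`, `main_prog`, `desc_str`, `scope`, and `dataset` are
// hypothetical names, not defined in this file:
//
//   Executor exe(place);
//   auto trainer = exe.InitForDataset(main_prog, desc_str, &scope, dataset);
//   exe.RunFromDataset(trainer);  // blocks until the trainer finishes
//   exe.ReleaseTrainer(trainer);  // finalizes the trainer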

void Executor::Run(const ProgramDesc& pdesc, Scope* scope, int block_id,
                   bool create_local_scope, bool create_vars,
                   const std::vector<std::string>& skip_ref_cnt_vars,
                   bool force_disable_gc, bool keep_kid_scopes) {
  platform::RecordBlock b(block_id);
  if (FLAGS_use_mkldnn) EnableMKLDNN(pdesc);
#ifdef PADDLE_WITH_MKLDNN
  platform::AttachPointerHashToMKLDNNKey(this, place_);
#endif
  auto ctx = Prepare(pdesc, block_id, skip_ref_cnt_vars, force_disable_gc);
  RunPreparedContext(ctx.get(), scope, create_local_scope, create_vars,
                     keep_kid_scopes);
}
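
// A minimal usage sketch of Run(), for illustration only; `main_prog` and
// `scope` are hypothetical names, and the trailing parameters are assumed
// to take their defaults from executor.h:
//
//   Executor exe(platform::CPUPlace());
//   Scope scope;
//   exe.Run(main_prog, &scope, /*block_id=*/0);  // runs all ops in block 0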

// Check whether the block already has feed operators and feed_holder.
// Return false if the block does not have any feed operators.
// If some feed operators have been prepended to the block, check that
// the info contained in these feed operators matches the feed_targets
// and feed_holder_name. Raise an exception when any mismatch is found.
// Return true if the block has feed operators and holder of matching info.
static bool has_feed_operators(
    const BlockDesc& block,
    const std::map<std::string, const LoDTensor*>& feed_targets,
    const std::string& feed_holder_name) {
  size_t feed_count = 0;
  for (auto* op : block.AllOps()) {
    if (op->Type() == kFeedOpType) {
      feed_count++;
      // The name of the feed_op's input variable should be feed_holder_name.
      PADDLE_ENFORCE_EQ(
          op->Input("X")[0], feed_holder_name,
          platform::errors::PreconditionNotMet(
              "Input to feed op should be '%s', but received '%s'.",
              feed_holder_name, op->Input("X")[0]));
      std::string feed_target_name = op->Output("Out")[0];
      PADDLE_ENFORCE_NE(feed_targets.find(feed_target_name), feed_targets.end(),
                        platform::errors::PreconditionNotMet(
                            "Feed operator output name '%s' cannot be found in "
                            "'feed_targets'",
                            feed_target_name));
    }
  }

  if (feed_count > 0) {
    PADDLE_ENFORCE_EQ(
        feed_count, feed_targets.size(),
        platform::errors::PreconditionNotMet(
            "The number of feed operators should match 'feed_targets', but "
            "received feed_count: %zu, required feed_targets.size(): %zu.",
            feed_count, feed_targets.size()));

    if (!feed_holder_name.empty()) {
      // When feed operators are present, so should be the feed_holder.
      auto var = block.FindVar(feed_holder_name);
      PADDLE_ENFORCE_NOT_NULL(
          var,
          platform::errors::PreconditionNotMet(
              "Block should already have a '%s' variable", feed_holder_name));
      PADDLE_ENFORCE_EQ(
          var->GetType(), proto::VarType::FEED_MINIBATCH,
          platform::errors::PreconditionNotMet(
              "'%s' variable should be 'FEED_MINIBATCH' type, but received "
              "'%s'.",
              feed_holder_name, DataTypeToString(var->GetType())));
    }
  }

  return feed_count > 0;
}

// Check whether the block already has fetch operators and fetch_holder.
// Return false if the block does not have any fetch operators.
// If some fetch operators have been appended to the block, check that
// the info contained in these fetch operators matches the fetch_targets
// and fetch_holder_name. Raise an exception when any mismatch is found.
// Return true if the block has fetch operators and holder of matching info.
static bool has_fetch_operators(
    const BlockDesc& block,
    const std::map<std::string, FetchType*>& fetch_targets,
    const std::string& fetch_holder_name) {
  size_t fetch_count = 0;
  for (auto* op : block.AllOps()) {
    if (op->Type() == kFetchOpType) {
      fetch_count++;
      // The name of the fetch_op's output variable should be fetch_holder_name.
      PADDLE_ENFORCE_EQ(
          op->Output("Out")[0], fetch_holder_name,
          platform::errors::PreconditionNotMet(
              "Output of fetch op should be '%s', but received '%s'.",
              fetch_holder_name, op->Output("Out")[0]));
      std::string fetch_target_name = op->Input("X")[0];
      PADDLE_ENFORCE_NE(fetch_targets.find(fetch_target_name),
                        fetch_targets.end(),
                        platform::errors::NotFound(
                            "Fetch operator input name '%s' cannot be found in "
                            "'fetch_targets'.",
                            fetch_target_name));
    }
  }

  if (fetch_count > 0) {
    PADDLE_ENFORCE_EQ(
        fetch_count, fetch_targets.size(),
        platform::errors::PreconditionNotMet(
            "The number of fetch operators should match 'fetch_targets', but "
            "received fetch_count: %zu, required fetch_targets.size(): %zu.",
            fetch_count, fetch_targets.size()));

    if (!fetch_holder_name.empty()) {
      // When fetch operators are present, so should be the fetch_holder.
      auto var = block.FindVar(fetch_holder_name);
      PADDLE_ENFORCE_NOT_NULL(
          var,
          platform::errors::PreconditionNotMet(
              "Block should already have a '%s' variable.", fetch_holder_name));
      PADDLE_ENFORCE_EQ(
          var->GetType(), proto::VarType::FETCH_LIST,
          platform::errors::PreconditionNotMet(
              "'%s' variable should be 'FETCH_LIST' type, but received '%s'.",
              fetch_holder_name, DataTypeToString(var->GetType())));
    }
  }

  return fetch_count > 0;
}

void Executor::Run(const ProgramDesc& program, Scope* scope,
                   std::map<std::string, const LoDTensor*>* feed_targets,
                   std::map<std::string, FetchType*>* fetch_targets,
                   bool create_local_scope, bool create_vars,
                   const std::string& feed_holder_name,
                   const std::string& fetch_holder_name) {
  platform::RecordBlock b(kProgramId);
  if (FLAGS_use_mkldnn) EnableMKLDNN(program);
#ifdef PADDLE_WITH_MKLDNN
  platform::AttachPointerHashToMKLDNNKey(this, place_);
#endif
  bool has_feed_ops =
      has_feed_operators(program.Block(0), *feed_targets, feed_holder_name);
  bool has_fetch_ops =
      has_fetch_operators(program.Block(0), *fetch_targets, fetch_holder_name);

  ProgramDesc* copy_program = const_cast<ProgramDesc*>(&program);
  std::unique_ptr<ProgramDesc> unique_ptr_of_copy_program;
  if (!has_feed_ops || !has_fetch_ops) {
    unique_ptr_of_copy_program.reset(new ProgramDesc(program));
    copy_program = unique_ptr_of_copy_program.get();
  }
  auto* global_block = copy_program->MutableBlock(0);

  if (!has_feed_ops) {
    // create feed_holder variable
    auto* feed_holder = global_block->Var(feed_holder_name);
    feed_holder->SetType(proto::VarType::FEED_MINIBATCH);
    feed_holder->SetPersistable(true);

    int i = 0;
    for (auto& feed_target : (*feed_targets)) {
      std::string var_name = feed_target.first;
      VLOG(3) << "feed target's name: " << var_name;

      // prepend feed op
      auto* op = global_block->PrependOp();
      op->SetType(kFeedOpType);
      op->SetInput("X", {feed_holder_name});
      op->SetOutput("Out", {var_name});
      op->SetAttr("col", {static_cast<int>(i)});
      op->CheckAttrs();

      i++;
    }
  }

  if (!has_fetch_ops) {
    // create fetch_holder variable
    auto* fetch_holder = global_block->Var(fetch_holder_name);
    fetch_holder->SetType(proto::VarType::FETCH_LIST);
    fetch_holder->SetPersistable(true);

    int i = 0;
    for (auto& fetch_target : (*fetch_targets)) {
      std::string var_name = fetch_target.first;
      VLOG(3) << "fetch target's name: " << var_name;

      // append fetch op
      auto* op = global_block->AppendOp();
      op->SetType(kFetchOpType);
      op->SetInput("X", {var_name});
      op->SetOutput("Out", {fetch_holder_name});
      op->SetAttr("col", {static_cast<int>(i)});
      op->CheckAttrs();

      i++;
    }
  }

  auto ctx = Prepare(*copy_program, 0);
  RunPreparedContext(ctx.get(), scope, feed_targets, fetch_targets,
                     create_local_scope, create_vars, feed_holder_name,
                     fetch_holder_name);
}
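
// A minimal sketch of the feed/fetch overload above, for illustration only;
// `program`, `scope`, `x_tensor`, and `out_var` are hypothetical:
//
//   std::map<std::string, const LoDTensor*> feeds{{"x", &x_tensor}};
//   std::map<std::string, FetchType*> fetches{{"out", &out_var}};
//   exe.Run(program, &scope, &feeds, &fetches);
//
// Missing feed/fetch ops are prepended/appended to a copy of the program,
// so the original ProgramDesc is left untouched.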

std::unique_ptr<ExecutorPrepareContext> Executor::Prepare(
    const ProgramDesc& program, int block_id,
    const std::vector<std::string>& skip_ref_cnt_vars, bool force_disable_gc) {
  std::unique_ptr<ExecutorPrepareContext> ctx(
      new ExecutorPrepareContext(program, block_id));
  PADDLE_ENFORCE_LT(static_cast<size_t>(block_id), program.Size(),
                    platform::errors::InvalidArgument(
                        "Input block id = %d, but it should be less than "
                        "program.size() which is %d",
                        static_cast<size_t>(block_id), program.Size()));
  auto& block = program.Block(block_id);
  for (auto& op_desc : block.AllOps()) {
    ctx->ops_.push_back(OpRegistry::CreateOp(*op_desc));
  }
  ctx->PrepareUnusedVars(skip_ref_cnt_vars, force_disable_gc);
  return ctx;
}
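
// A minimal prepare-once / run-many sketch, for illustration only;
// `main_prog`, `scope`, and `num_steps` are hypothetical:
//
//   auto ctx = exe.Prepare(main_prog, /*block_id=*/0);
//   for (int step = 0; step < num_steps; ++step) {
//     exe.RunPreparedContext(ctx.get(), &scope);  // reuses the created ops
//   }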

std::vector<std::shared_ptr<ExecutorPrepareContext>> Executor::Prepare(
    const ProgramDesc& program, const std::vector<int>& block_ids,
    const std::vector<std::vector<std::string>>& skip_ref_cnt_vars,
    bool force_disable_gc) {
  PADDLE_ENFORCE_EQ(
      skip_ref_cnt_vars.empty() || skip_ref_cnt_vars.size() == block_ids.size(),
      true,
      platform::errors::InvalidArgument("skip_ref_cnt_vars should be either "
                                        "empty or equal to block number %d",
                                        block_ids.size()));
  std::vector<std::shared_ptr<ExecutorPrepareContext>> result;
  size_t idx = 0;
  for (auto& bid : block_ids) {
    PADDLE_ENFORCE_LT(static_cast<size_t>(bid), program.Size(),
                      platform::errors::InvalidArgument(
                          "Input block id = %zu, but it should be less than "
                          "program.size() which is %zu",
                          static_cast<size_t>(bid), program.Size()));
    auto* ctx = new ExecutorPrepareContext(program, bid);
    auto& block = program.Block(bid);
    for (auto& op_desc : block.AllOps()) {
      ctx->ops_.push_back(OpRegistry::CreateOp(*op_desc));
    }
    if (skip_ref_cnt_vars.empty()) {
      ctx->PrepareUnusedVars(std::vector<std::string>(), force_disable_gc);
    } else {
      ctx->PrepareUnusedVars(skip_ref_cnt_vars[idx], force_disable_gc);
    }
    result.push_back(std::shared_ptr<ExecutorPrepareContext>(ctx));
    ++idx;
  }
  return result;
}

void Executor::RunPartialPreparedContext(ExecutorPrepareContext* ctx,
                                         Scope* scope, int64_t start_op_index,
                                         int64_t end_op_index,
                                         bool create_local_scope,
                                         bool create_vars, bool keep_kids) {
  platform::RecordBlock b(kProgramId);
  PADDLE_ENFORCE_NOT_NULL(
      scope, platform::errors::InvalidArgument("Scope shouldn't be null"));
  Scope* local_scope = scope;
  if (create_vars) {
    if (create_local_scope) {
      local_scope = &scope->NewScope();
    }
    CreateVariables(ctx->prog_, local_scope, ctx->block_id_);
  }

  int64_t max_memory_size = GetEagerDeletionThreshold();
  std::unique_ptr<GarbageCollector> gc;
  if (!ctx->force_disable_gc_ && max_memory_size >= 0) {
    if (platform::is_gpu_place(place_)) {
#if defined(PADDLE_WITH_CUDA) || defined(PADDLE_WITH_HIP)
      if (IsFastEagerDeletionModeEnabled()) {
        gc.reset(new UnsafeFastGPUGarbageCollector(place_, max_memory_size));
      } else {
        gc.reset(new DefaultStreamGarbageCollector(place_, max_memory_size));
      }
#else
      PADDLE_THROW(
          platform::errors::Unimplemented("No GPU gc found in CPU/XPU paddle"));
#endif
    } else if (platform::is_cpu_place(place_)) {
      gc.reset(new CPUGarbageCollector(place_, max_memory_size));
    } else if (platform::is_xpu_place(place_)) {
#ifdef PADDLE_WITH_XPU
      gc.reset(new XPUGarbageCollector(place_, max_memory_size));
#else
      PADDLE_THROW(
          platform::errors::Unimplemented("No XPU gc found in CPU/GPU paddle"));
#endif
    } else if (platform::is_ipu_place(place_)) {
#ifdef PADDLE_WITH_IPU
      gc.reset(new IPUGarbageCollector(place_, max_memory_size));
#else
      PADDLE_THROW(
          platform::errors::Unimplemented("No IPU gc found in CPU/IPU paddle"));
#endif
    } else if (platform::is_npu_place(place_)) {
#ifdef PADDLE_WITH_ASCEND_CL
      if (IsFastEagerDeletionModeEnabled()) {
        VLOG(4) << "Use unsafe fast gc for NPU.";
        gc.reset(new NPUUnsafeFastGarbageCollector(place_, max_memory_size));
      } else {
        PADDLE_THROW(platform::errors::Unimplemented(
            "Please set FLAGS_fast_eager_deletion_mode=true to use "
            "GarbageCollector on NPU."));
        // TODO(zhiqiu): fix bugs and enable NPUDefaultStreamGarbageCollector.
        VLOG(4) << "Use default stream gc for NPU.";
        gc.reset(new NPUDefaultStreamGarbageCollector(place_, max_memory_size));
      }
#else
      PADDLE_THROW(
          platform::errors::Unimplemented("No NPU gc found in CPU/NPU paddle"));
#endif
    } else if (platform::is_mlu_place(place_)) {
#ifdef PADDLE_WITH_MLU
      if (IsFastEagerDeletionModeEnabled()) {
        gc.reset(new MLUUnsafeFastGarbageCollector(place_, max_memory_size));
      } else {
        gc.reset(new MLUDefaultStreamGarbageCollector(place_, max_memory_size));
      }
#else
      PADDLE_THROW(
          platform::errors::Unimplemented("No MLU gc found in CPU/MLU paddle"));
#endif
    } else if (platform::is_custom_place(place_)) {
#ifdef PADDLE_WITH_CUSTOM_DEVICE
      if (IsFastEagerDeletionModeEnabled()) {
        VLOG(4) << "Use unsafe fast gc for " << place_ << ".";
        gc.reset(new CustomDeviceUnsafeFastGarbageCollector(place_,
                                                            max_memory_size));
      } else {
        VLOG(4) << "Use default stream gc for " << place_ << ".";
        gc.reset(
            new CustomDefaultStreamGarbageCollector(place_, max_memory_size));
      }
#else
      PADDLE_THROW(platform::errors::Unimplemented("No CustomDevice gc found"));
#endif
    }
  }

  for (int64_t i = start_op_index; i < end_op_index; ++i) {
    auto& op = ctx->ops_[i];
    op->Run(*local_scope, place_);
    if (gc) {
      DeleteUnusedTensors(*local_scope, op.get(), ctx->unused_vars_, gc.get());
    }
  }

  auto callback = [scope, local_scope, keep_kids]() {
    if (local_scope != scope) {
      VLOG(4) << "Delete scope: " << local_scope;
      scope->DeleteScope(local_scope);
    } else {
      if (!keep_kids) {
        VLOG(4) << "Drop kids: " << scope;
        // By default, we delete all kid scopes after the executor runs,
        // because some operators may create local scopes while running
        // (e.g. while_op). But when while_op itself creates a local
        // executor to run its sub-block, the sub-scopes it created must
        // not be dropped immediately: while_grad_op will use variables
        // created during the while_op run, so we keep the kids and wait
        // for the outer executor to drop them.

        scope->DropKids();
      }
      VLOG(4) << "Keep kids: " << scope;
    }
  };

  if (gc) {
    VLOG(4) << "Async deleting scope";
    gc->DirectClearCallback(callback);
  } else {
    VLOG(4) << "Sync deleting scope";
    platform::DeviceContextPool::Instance().Get(place_)->Wait();
    callback();
  }
}
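
// A partial-run sketch, for illustration only: execute just ops [2, 5) of a
// prepared block (`ctx` and `scope` are hypothetical):
//
//   exe.RunPartialPreparedContext(ctx.get(), &scope, /*start_op_index=*/2,
//                                 /*end_op_index=*/5,
//                                 /*create_local_scope=*/true,
//                                 /*create_vars=*/true, /*keep_kids=*/false);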

void Executor::RunPreparedContext(ExecutorPrepareContext* ctx, Scope* scope,
                                  bool create_local_scope, bool create_vars,
                                  bool keep_kids) {
  int64_t start_op_index = 0;
  int64_t end_op_index = ctx->ops_.size();
  RunPartialPreparedContext(ctx, scope, start_op_index, end_op_index,
                            create_local_scope, create_vars, keep_kids);
}

void Executor::RunPreparedContext(
    ExecutorPrepareContext* ctx, Scope* scope,
    std::map<std::string, const LoDTensor*>* feed_targets,
    std::map<std::string, FetchType*>* fetch_targets, bool create_local_scope,
    bool create_vars, const std::string& feed_holder_name,
    const std::string& fetch_holder_name) {
  auto& global_block = ctx->prog_.Block(ctx->block_id_);

  PADDLE_ENFORCE_EQ(
      has_feed_operators(global_block, *feed_targets, feed_holder_name), true,
      platform::errors::PreconditionNotMet(
          "Program in ExecutorPrepareContext should have feed_ops."));
  PADDLE_ENFORCE_EQ(
      has_fetch_operators(global_block, *fetch_targets, fetch_holder_name),
      true, platform::errors::PreconditionNotMet(
                "Program in the prepared context should have fetch_ops."));

  // map the data of feed_targets to feed_holder
  for (auto* op : global_block.AllOps()) {
    if (op->Type() == kFeedOpType) {
      std::string feed_target_name = op->Output("Out")[0];
      int idx = BOOST_GET_CONST(int, op->GetAttr("col"));
      SetFeedVariable(scope, *(*feed_targets)[feed_target_name],
                      feed_holder_name, idx);
    }
  }

  RunPreparedContext(ctx, scope, create_local_scope, create_vars);

  // obtain the data of fetch_targets from fetch_holder
  for (auto* op : global_block.AllOps()) {
    if (op->Type() == kFetchOpType) {
      std::string fetch_target_name = op->Input("X")[0];
      int idx = BOOST_GET_CONST(int, op->GetAttr("col"));
      *(*fetch_targets)[fetch_target_name] =
          GetFetchVariable(*scope, fetch_holder_name, idx);
    }
  }
}

void Executor::EnableMKLDNN(const ProgramDesc& program) {
#ifdef PADDLE_WITH_MKLDNN
  VLOG(3) << "use_mkldnn=True";
  for (size_t bid = 0; bid < program.Size(); ++bid) {
    auto* block = const_cast<ProgramDesc&>(program).MutableBlock(bid);
    for (auto* op : block->AllOps()) {
      if (op->HasAttr("use_mkldnn")) {
        op->SetAttr("use_mkldnn", true);
      }
    }
  }
#else
  LOG(WARNING)
      << "'MKLDNN' is not supported. Please re-compile with the WITH_MKLDNN "
         "option";
#endif
}
}  // namespace framework
}  // namespace paddle