/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include <algorithm>
#include <memory>
#include <string>
#include <unordered_set>
#include <vector>

#include "paddle/fluid/framework/executor.h"

#include "paddle/fluid/framework/feed_fetch_method.h"
#include "paddle/fluid/framework/lod_rank_table.h"
#include "paddle/fluid/framework/lod_tensor_array.h"
#include "paddle/fluid/framework/op_registry.h"
#include "paddle/fluid/framework/reader.h"
#include "paddle/fluid/operators/detail/macros.h"
#include "paddle/fluid/platform/place.h"
#include "paddle/fluid/platform/profiler.h"

DECLARE_bool(benchmark);
DEFINE_bool(use_mkldnn, false, "Use MKLDNN to run");

namespace paddle {
namespace framework {
namespace {
// Block ids start from 0. This id (-1) represents the code block that wraps
// the outermost block, block 0.
int kProgramId = -1;
}  // namespace

ExecutorPrepareContext::ExecutorPrepareContext(
    const framework::ProgramDesc& prog, size_t block_id)
    : prog_(prog), block_id_(block_id) {
  if (GetEagerDeletionThreshold() >= 0) {
    ref_cnts_ = GetNonPersistableReferenceCount<int>(prog_, block_id_);
  }
}

ExecutorPrepareContext::~ExecutorPrepareContext() {
  VLOG(5) << "destroy ExecutorPrepareContext";
}

#ifndef _WIN32
template <typename RefCntMap>
static void DeleteUnusedTensors(const Scope& scope, const OperatorBase* op,
                                GarbageCollector<Tensor>* gc,
                                RefCntMap* ref_cnts) {
  std::unordered_set<Tensor*> erase_tensors;

  auto handler = [&](const VariableNameMap& name_map) {
    for (auto& name_pair : name_map) {
      for (auto& name : name_pair.second) {
        auto it = ref_cnts->find(name);
        if (it == ref_cnts->end()) continue;
        if ((it->second)-- == 1) {
          auto* var = scope.FindVar(name);
          if (var != nullptr) {
            VLOG(10) << "Erase tensor \'" << name << "\'";
            if (var->IsType<LoDTensor>()) {
              erase_tensors.insert(var->GetMutable<LoDTensor>());
            } else if (var->IsType<SelectedRows>()) {
              erase_tensors.insert(
                  var->GetMutable<SelectedRows>()->mutable_value());
            }
          }
        }
      }
    }
  };

  handler(op->Inputs());
  handler(op->Outputs());

  if (!erase_tensors.empty()) {
    gc->Add(erase_tensors);
  }
}
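
// Worked example of the counting above: if tensor 'x' is consumed by two
// downstream ops, ref_cnts["x"] starts at 2. Running the first op decrements
// it to 1 and 'x' survives; running the second op observes the count at 1,
// so 'x' is looked up in the scope and its tensor is handed to the garbage
// collector.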
#endif

Executor::Executor(const platform::Place& place) : place_(place) {}

void Executor::Close() {
#ifdef PADDLE_WITH_DISTRIBUTE
  // TODO(typhoonzero): the complete message will need to use the real
  // trainer_id, not 0.
  ::paddle::operators::distributed::RPCClient::GetInstance<
      ::paddle::operators::distributed::GRPCClient>(0)
      ->SendComplete();
#endif
}

void InitializeVariable(Variable* var, proto::VarType::Type var_type) {
  if (var_type == proto::VarType::LOD_TENSOR) {
    var->GetMutable<LoDTensor>();
  } else if (var_type == proto::VarType::SELECTED_ROWS) {
    var->GetMutable<SelectedRows>();
  } else if (var_type == proto::VarType::FEED_MINIBATCH) {
    var->GetMutable<FeedFetchList>();
  } else if (var_type == proto::VarType::FETCH_LIST) {
    var->GetMutable<FeedFetchList>();
  } else if (var_type == proto::VarType::STEP_SCOPES) {
    var->GetMutable<std::vector<framework::Scope*>>();
  } else if (var_type == proto::VarType::LOD_RANK_TABLE) {
    var->GetMutable<LoDRankTable>();
  } else if (var_type == proto::VarType::LOD_TENSOR_ARRAY) {
    var->GetMutable<LoDTensorArray>();
  } else if (var_type == proto::VarType::PLACE_LIST) {
    var->GetMutable<platform::PlaceList>();
  } else if (var_type == proto::VarType::READER) {
    var->GetMutable<ReaderHolder>();
  } else if (var_type == proto::VarType::RAW) {
    // GetMutable will be called in the operator itself.
  } else {
    PADDLE_THROW(
        "Variable type %d is not in "
        "[LOD_TENSOR, SELECTED_ROWS, FEED_MINIBATCH, FETCH_LIST, "
        "STEP_SCOPES, LOD_RANK_TABLE, LOD_TENSOR_ARRAY, PLACE_LIST, "
        "READER, RAW]",
        var_type);
  }
}
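
// A minimal usage sketch (assuming `scope` is an existing Scope):
//   Variable* var = scope.Var("x");
//   InitializeVariable(var, proto::VarType::LOD_TENSOR);
//   // `var` now holds a default-constructed LoDTensor payload.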

void Executor::CreateVariables(const ProgramDesc& pdesc, Scope* scope,
                               int block_id) {
  auto& global_block = pdesc.Block(block_id);

  const Scope* ancestor_scope = scope;
  while (ancestor_scope->parent()) {
    ancestor_scope = ancestor_scope->parent();
  }

  if (ancestor_scope != scope) {
    for (auto& var : global_block.AllVars()) {
      if (var->Name() == framework::kEmptyVarName) {
        continue;
      }

      if (var->Persistable()) {
        auto* ptr = const_cast<Scope*>(ancestor_scope)->Var(var->Name());
        InitializeVariable(ptr, var->GetType());
        VLOG(3) << "Create Variable " << var->Name()
                << " global, which pointer is " << ptr;
      } else {
        auto* ptr = scope->Var(var->Name());
        InitializeVariable(ptr, var->GetType());
        VLOG(3) << "Create Variable " << var->Name()
                << " locally, which pointer is " << ptr;
      }
    }
  } else {
    for (auto& var : global_block.AllVars()) {
      auto* ptr = scope->Var(var->Name());
      InitializeVariable(ptr, var->GetType());
      VLOG(3) << "Create variable " << var->Name() << ", which pointer is "
              << ptr;
    }
  }
}
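
// Note on the branches above: persistable variables are pinned in the
// outermost ancestor scope so they outlive any per-run local scope, while
// non-persistable variables are created in the passed-in scope and can be
// reclaimed when that scope is deleted.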

void Executor::Run(const ProgramDesc& pdesc, Scope* scope, int block_id,
                   bool create_local_scope, bool create_vars) {
  platform::RecordBlock b(block_id);
  if (FLAGS_use_mkldnn) EnableMKLDNN(pdesc);
  auto ctx = Prepare(pdesc, block_id);
  RunPreparedContext(ctx.get(), scope, create_local_scope, create_vars);
}
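
// A minimal usage sketch (assuming `program` and `scope` already exist):
//   Executor exe(platform::CPUPlace());
//   exe.Run(program, &scope, /*block_id=*/0,
//           /*create_local_scope=*/true, /*create_vars=*/true);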

// Check whether the block already has feed operators and feed_holder.
// Return false if the block does not have any feed operators.
// If some feed operators have been prepended to the block, check that
// the info contained in these feed operators matches the feed_targets
// and feed_holder_name. Raise an exception when any mismatch is found.
// Return true if the block has feed operators and a holder of matching info.
static bool has_feed_operators(
    const BlockDesc& block,
    const std::map<std::string, const LoDTensor*>& feed_targets,
    const std::string& feed_holder_name) {
  size_t feed_count = 0;
  for (auto* op : block.AllOps()) {
    if (op->Type() == kFeedOpType) {
      feed_count++;
      // The feed_op's input variable name should be feed_holder_name.
      PADDLE_ENFORCE_EQ(op->Input("X")[0], feed_holder_name,
                        "Input to feed op should be '%s'", feed_holder_name);
      std::string feed_target_name = op->Output("Out")[0];
      PADDLE_ENFORCE(
          feed_targets.find(feed_target_name) != feed_targets.end(),
          "Feed operator output name '%s' cannot be found in 'feed_targets'",
          feed_target_name);
    }
  }

  if (feed_count > 0) {
    PADDLE_ENFORCE_EQ(
        feed_count, feed_targets.size(),
        "The number of feed operators should match 'feed_targets'");

    if (!feed_holder_name.empty()) {
      // When feed operators are present, so should be the feed_holder.
      auto var = block.FindVar(feed_holder_name);
      PADDLE_ENFORCE_NOT_NULL(var, "Block should already have a '%s' variable",
                              feed_holder_name);
      PADDLE_ENFORCE_EQ(var->GetType(), proto::VarType::FEED_MINIBATCH,
                        "'%s' variable should be 'FEED_MINIBATCH' type",
                        feed_holder_name);
    }
  }

  return feed_count > 0;
}

// Check whether the block already has fetch operators and fetch_holder.
// Return false if the block does not have any fetch operators.
// If some fetch operators have been appended to the block, check that
// the info contained in these fetch operators matches the fetch_targets
// and fetch_holder_name. Raise an exception when any mismatch is found.
// Return true if the block has fetch operators and a holder of matching info.
static bool has_fetch_operators(
    const BlockDesc& block,
    const std::map<std::string, LoDTensor*>& fetch_targets,
    const std::string& fetch_holder_name) {
  size_t fetch_count = 0;
  for (auto* op : block.AllOps()) {
    if (op->Type() == kFetchOpType) {
      fetch_count++;
      // The fetch_op's output variable name should be fetch_holder_name.
      PADDLE_ENFORCE_EQ(op->Output("Out")[0], fetch_holder_name,
                        "Output of fetch op should be '%s'", fetch_holder_name);
      std::string fetch_target_name = op->Input("X")[0];
      PADDLE_ENFORCE(
          fetch_targets.find(fetch_target_name) != fetch_targets.end(),
          "Fetch operator input name '%s' cannot be found in 'fetch_targets'",
          fetch_target_name);
    }
  }

  if (fetch_count > 0) {
    PADDLE_ENFORCE_EQ(
        fetch_count, fetch_targets.size(),
        "The number of fetch operators should match 'fetch_targets'");

    if (!fetch_holder_name.empty()) {
      // When fetch operators are present, so should be the fetch_holder.
      auto var = block.FindVar(fetch_holder_name);
      PADDLE_ENFORCE_NOT_NULL(var, "Block should already have a '%s' variable",
                              fetch_holder_name);
      PADDLE_ENFORCE_EQ(var->GetType(), proto::VarType::FETCH_LIST,
                        "'%s' variable should be 'FETCH_LIST' type",
                        fetch_holder_name);
    }
  }

  return fetch_count > 0;
}

void Executor::Run(const ProgramDesc& program, Scope* scope,
                   std::map<std::string, const LoDTensor*>* feed_targets,
                   std::map<std::string, LoDTensor*>* fetch_targets,
                   bool create_local_scope, bool create_vars,
                   const std::string& feed_holder_name,
                   const std::string& fetch_holder_name) {
  platform::RecordBlock b(kProgramId);
  if (FLAGS_use_mkldnn) EnableMKLDNN(program);
  bool has_feed_ops =
      has_feed_operators(program.Block(0), *feed_targets, feed_holder_name);
  bool has_fetch_ops =
      has_fetch_operators(program.Block(0), *fetch_targets, fetch_holder_name);

  ProgramDesc* copy_program = const_cast<ProgramDesc*>(&program);
  std::unique_ptr<ProgramDesc> unique_ptr_of_copy_program;
  if (!has_feed_ops || !has_fetch_ops) {
    unique_ptr_of_copy_program.reset(new ProgramDesc(program));
    copy_program = unique_ptr_of_copy_program.get();
  }
  auto* global_block = copy_program->MutableBlock(0);

  if (!has_feed_ops) {
    // create feed_holder variable
    auto* feed_holder = global_block->Var(feed_holder_name);
    feed_holder->SetType(proto::VarType::FEED_MINIBATCH);
    feed_holder->SetPersistable(true);

    int i = 0;
    for (auto& feed_target : (*feed_targets)) {
      std::string var_name = feed_target.first;
      VLOG(3) << "feed target's name: " << var_name;

      // prepend feed op
      auto* op = global_block->PrependOp();
      op->SetType(kFeedOpType);
      op->SetInput("X", {feed_holder_name});
      op->SetOutput("Out", {var_name});
      op->SetAttr("col", {static_cast<int>(i)});
      op->CheckAttrs();

      i++;
    }
  }

  if (!has_fetch_ops) {
    // create fetch_holder variable
    auto* fetch_holder = global_block->Var(fetch_holder_name);
    fetch_holder->SetType(proto::VarType::FETCH_LIST);
    fetch_holder->SetPersistable(true);

    int i = 0;
    for (auto& fetch_target : (*fetch_targets)) {
      std::string var_name = fetch_target.first;
      VLOG(3) << "fetch target's name: " << var_name;

      // append fetch op
      auto* op = global_block->AppendOp();
      op->SetType(kFetchOpType);
      op->SetInput("X", {var_name});
      op->SetOutput("Out", {fetch_holder_name});
      op->SetAttr("col", {static_cast<int>(i)});
      op->CheckAttrs();

      i++;
    }
  }

  auto ctx = Prepare(*copy_program, 0);
  RunPreparedContext(ctx.get(), scope, feed_targets, fetch_targets,
                     create_local_scope, create_vars, feed_holder_name,
                     fetch_holder_name);
}
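
// A minimal usage sketch (the tensor names and holder names below are
// assumed for illustration, not taken from this file):
//   std::map<std::string, const LoDTensor*> feeds{{"img", &input_tensor}};
//   std::map<std::string, LoDTensor*> fetches{{"prob", &output_tensor}};
//   exe.Run(program, &scope, &feeds, &fetches,
//           /*create_local_scope=*/true, /*create_vars=*/true,
//           /*feed_holder_name=*/"feed", /*fetch_holder_name=*/"fetch");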

std::unique_ptr<ExecutorPrepareContext> Executor::Prepare(
    const ProgramDesc& program, int block_id) {
  std::unique_ptr<ExecutorPrepareContext> ctx(
      new ExecutorPrepareContext(program, block_id));
  PADDLE_ENFORCE_LT(static_cast<size_t>(block_id), program.Size());
  auto& block = program.Block(block_id);
  for (auto& op_desc : block.AllOps()) {
    ctx->ops_.push_back(OpRegistry::CreateOp(*op_desc));
  }
  return ctx;
}
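
// Prepare() front-loads operator creation so a fixed program can be run many
// times without recreating its ops. A sketch (reusing the assumed `exe`,
// `program`, and `scope` from the examples above; kNumSteps is made up):
//   auto ctx = exe.Prepare(program, /*block_id=*/0);
//   for (int step = 0; step < kNumSteps; ++step) {
//     exe.RunPreparedContext(ctx.get(), &scope, /*create_local_scope=*/true,
//                            /*create_vars=*/true);
//   }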

std::vector<std::shared_ptr<ExecutorPrepareContext>> Executor::Prepare(
    const ProgramDesc& program, const std::vector<int>& block_ids) {
  std::vector<std::shared_ptr<ExecutorPrepareContext>> result;
  for (auto& bid : block_ids) {
    auto* ctx = new ExecutorPrepareContext(program, bid);
    PADDLE_ENFORCE_LT(static_cast<size_t>(bid), program.Size());
    auto& block = program.Block(bid);
    for (auto& op_desc : block.AllOps()) {
      ctx->ops_.push_back(OpRegistry::CreateOp(*op_desc));
    }
    result.push_back(std::shared_ptr<ExecutorPrepareContext>(ctx));
  }
  return result;
}
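
// Sketch for the multi-block overload (the block ids are illustrative):
//   auto ctxs = exe.Prepare(program, std::vector<int>{0, 1});
//   // ctxs[i] holds the prepared ops of block i; the shared_ptr return type
//   // lets callers hand one context to several owners.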

void Executor::RunPreparedContext(ExecutorPrepareContext* ctx, Scope* scope,
                                  bool create_local_scope, bool create_vars,
                                  bool keep_kids) {
  Scope* local_scope = scope;
  if (create_vars) {
    if (create_local_scope) {
      local_scope = &scope->NewScope();
    }
    CreateVariables(ctx->prog_, local_scope, ctx->block_id_);
  }

#ifndef _WIN32
  int64_t max_memory_size = GetEagerDeletionThreshold();
  std::unique_ptr<GarbageCollector<Tensor>> gc;
  // WhileOp sets keep_kids to false, but WhileGradOp needs the scopes
  // created in WhileOp, so we should not perform eager deletion in WhileOp;
  // the scopes and variables created by WhileOp are deleted in WhileGradOp.
  if (max_memory_size >= 0 && !keep_kids) {
    ctx->ResetReferenceCount();
#ifdef PADDLE_WITH_CUDA
    if (platform::is_gpu_place(place_)) {
      gc.reset(new DefaultStreamGarbageCollector<Tensor>(
          boost::get<platform::CUDAPlace>(place_), max_memory_size));
    } else {
#endif
      gc.reset(new CPUGarbageCollector<Tensor>(
          boost::get<platform::CPUPlace>(place_), max_memory_size));
#ifdef PADDLE_WITH_CUDA
    }
#endif
  }

  for (auto& op : ctx->ops_) {
    op->Run(*local_scope, place_);

    if (gc != nullptr) {
      DeleteUnusedTensors(*local_scope, op.get(), gc.get(),
                          &(ctx->cur_ref_cnts_));
    }

    if (FLAGS_benchmark) {
      VLOG(2) << "Memory used after operator " + op->Type() + " running: "
              << memory::memory_usage(place_);
    }
  }

  if (gc != nullptr) {
    gc->Wait();
  } else {
    platform::DeviceContextPool::Instance().Get(place_)->Wait();
  }
#else   // WIN32
  for (auto& op : ctx->ops_) {
    op->Run(*local_scope, place_);
    if (FLAGS_benchmark) {
      VLOG(2) << "Memory used after operator " + op->Type() + " running: "
              << memory::memory_usage(place_);
    }
  }
  platform::DeviceContextPool::Instance().Get(place_)->Wait();
#endif  // NOT WIN32

  if (local_scope != scope) {
    scope->DeleteScope(local_scope);
  } else {
    if (!keep_kids) {
      // By default, we should delete all kid scopes after running the
      // executor, because some operators (such as while_op) may create local
      // scopes when running. But when while_op also creates a local executor
      // to run its sub-block, the sub-scopes it created should not be dropped
      // immediately, because while_grad_op will use some variables created
      // during the while_op run; we need to keep the kids and wait for the
      // outer executor to drop them.
      scope->DropKids();
    }
  }

  if (FLAGS_benchmark) {
    VLOG(2) << "-------------------------------------------------------";
    VLOG(2) << "Memory used after deleting local scope: "
            << memory::memory_usage(place_);
    VLOG(2) << "-------------------------------------------------------";
  }
}

void Executor::RunPreparedContext(
    ExecutorPrepareContext* ctx, Scope* scope,
    std::map<std::string, const LoDTensor*>* feed_targets,
    std::map<std::string, LoDTensor*>* fetch_targets, bool create_local_scope,
    bool create_vars, const std::string& feed_holder_name,
    const std::string& fetch_holder_name) {
  auto& global_block = ctx->prog_.Block(ctx->block_id_);

  PADDLE_ENFORCE(
      has_feed_operators(global_block, *feed_targets, feed_holder_name),
      "Program in ExecutorPrepareContext should have feed_ops.");
  PADDLE_ENFORCE(
      has_fetch_operators(global_block, *fetch_targets, fetch_holder_name),
      "Program in the prepared context should have fetch_ops.");

  // map the data of feed_targets to feed_holder
  for (auto* op : global_block.AllOps()) {
    if (op->Type() == kFeedOpType) {
      std::string feed_target_name = op->Output("Out")[0];
      int idx = boost::get<int>(op->GetAttr("col"));
      SetFeedVariable(scope, *(*feed_targets)[feed_target_name],
                      feed_holder_name, idx);
    }
  }

  RunPreparedContext(ctx, scope, create_local_scope, create_vars);

  // obtain the data of fetch_targets from fetch_holder
  for (auto* op : global_block.AllOps()) {
    if (op->Type() == kFetchOpType) {
      std::string fetch_target_name = op->Input("X")[0];
      int idx = boost::get<int>(op->GetAttr("col"));
      *(*fetch_targets)[fetch_target_name] =
          GetFetchVariable(*scope, fetch_holder_name, idx);
    }
  }
}

void Executor::EnableMKLDNN(const ProgramDesc& program) {
#ifdef PADDLE_WITH_MKLDNN
  VLOG(3) << "use_mkldnn=True";
  for (size_t bid = 0; bid < program.Size(); ++bid) {
    auto* block = const_cast<ProgramDesc&>(program).MutableBlock(bid);
    for (auto* op : block->AllOps()) {
      if (op->HasAttr("use_mkldnn")) {
        op->SetAttr("use_mkldnn", true);
      }
    }
  }
#else
  LOG(WARNING)
      << "'MKLDNN' is not supported. Please re-compile with the WITH_MKLDNN "
         "option.";
#endif
}
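
// Note: EnableMKLDNN is driven by the --use_mkldnn gflag defined at the top
// of this file; both Executor::Run overloads invoke it before Prepare(), and
// it simply sets the "use_mkldnn" attribute on every op that declares one.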

}  // namespace framework
}  // namespace paddle