// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include <memory>
#include <string>
#include <thread>
#include <utility>
#include <vector>

#include "paddle/fluid/framework/feed_fetch_method.h"
#include "paddle/fluid/framework/lod_rank_table.h"
#include "paddle/fluid/framework/lod_tensor_array.h"
#include "paddle/fluid/framework/naive_executor.h"
#include "paddle/fluid/framework/op_registry.h"
#include "paddle/fluid/framework/reader.h"
#include "paddle/fluid/framework/variable_helper.h"
#include "paddle/fluid/string/pretty_log.h"

namespace paddle {
namespace framework {
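// Prepare binds the executor to a scope (creating a fresh one when none is
// given) and builds the operator list for the requested block.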
void NaiveExecutor::Prepare(Scope *scope, const ProgramDesc &program_desc,
                            int block_id, bool with_feed_fetch_ops) {
  if (!scope) {
    scope_ = new framework::Scope;
  } else {
    scope_ = scope;
  }

  VLOG(3) << "NaiveExecutor init with scope " << scope;
  CreateOps(program_desc, block_id, with_feed_fetch_ops);
}

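// Run executes every prepared operator in order on the bound scope.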
void NaiveExecutor::Run() {
  for (auto &op : ops_) {
    VLOG(4) << std::this_thread::get_id() << " run "
            << op->DebugStringEx(scope_) << " on scope " << scope_;
    op->SetIsCalledByExecutor(false);
    op->Run(*scope_, place_);
  }
}
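
// A minimal usage sketch (illustrative only; assumes the constructor takes
// the target place and that `scope` and `program` exist elsewhere):
//
//   framework::NaiveExecutor executor(platform::CPUPlace());
//   executor.Prepare(scope, program, /*block_id=*/0,
//                    /*with_feed_fetch_ops=*/false);
//   executor.Run();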

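// CreateVariables instantiates the variables of a block. Persistable
// variables are created once in the root (ancestor) scope; non-persistable
// variables are created in the given local scope.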
void NaiveExecutor::CreateVariables(const ProgramDesc &desc, int block_id,
                                    bool persistable, Scope *scope) {
  PADDLE_ENFORCE_NOT_NULL(scope);

  auto &global_block = desc.Block(block_id);

  const auto *anc = scope;
  PADDLE_ENFORCE(anc->parent() != anc);
  while (anc->parent()) {
    anc = anc->parent();
  }

  int num_vars = 0;
  for (auto &var : global_block.AllVars()) {
    if (var->Name() == framework::kEmptyVarName) {
      continue;
    }
    num_vars++;

    if (persistable == var->Persistable()) {
      if (persistable) {
        if (!anc->FindVar(var->Name())) {
          auto *ptr = const_cast<Scope *>(anc)->Var(var->Name());
          VLOG(3) << scope << " Create persistable variable " << var->Name()
                  << ", which pointer is " << ptr;
          InitializeVariable(ptr, var->GetType());
        }
      } else {
        auto *ptr = const_cast<Scope *>(scope)->Var(var->Name());
        VLOG(3) << scope << " Create variable " << var->Name()
                << ", which pointer is " << ptr;
        InitializeVariable(ptr, var->GetType());
      }
    }
  }
  VLOG(4) << "naive executor create " << num_vars << " vars";
}

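// CreateOps instantiates the operators of a block, optionally skipping
// feed and fetch operators.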
void NaiveExecutor::CreateOps(const ProgramDesc &desc, int block_id,
                              bool with_feed_fetch_ops) {
  for (const auto &op_desc : desc.Block(block_id).AllOps()) {
    if (!with_feed_fetch_ops &&
        (op_desc->Type() == "feed" || op_desc->Type() == "fetch")) {
      LOG(INFO) << "---  skip [" << op_desc->Input("X")[0] << "], "
                << op_desc->Type() << " -> " << op_desc->Output("Out")[0];
      continue;
    }
    ops_.emplace_back(OpRegistry::CreateOp(*op_desc));
  }
}

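// FindTensor looks up a variable by name in the bound scope and returns its
// LoDTensor.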
LoDTensor *NaiveExecutor::FindTensor(const std::string &name) {
  PADDLE_ENFORCE(scope_, "Need to init scope first");
  auto *var = scope_->FindVar(name);
  PADDLE_ENFORCE(var, "No variable [%s] in the scope", name);
  auto *tensor = const_cast<LoDTensor *>(&var->Get<LoDTensor>());
  return tensor;
}

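// CleanFeedFetchOps drops any feed and fetch operators from the prepared
// operator list.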
void NaiveExecutor::CleanFeedFetchOps() {
  std::vector<std::unique_ptr<OperatorBase>> ops;
  for (auto &op : ops_) {
    if (op->Type() != "feed" && op->Type() != "fetch") {
      ops.emplace_back(std::move(op));
    }
  }
  ops_.swap(ops);
}

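// When built with MKL-DNN support, the destructor clears the MKL-DNN device
// context cache for CPU places.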
NaiveExecutor::~NaiveExecutor() {
#ifdef PADDLE_WITH_MKLDNN
  // Clear the mkl-dnn cache; this is needed for the mkl-dnn unit tests to
  // work correctly.
  if (platform::is_cpu_place(place_)) {
    platform::DeviceContextPool &pool = platform::DeviceContextPool::Instance();
    platform::MKLDNNDeviceContext *dev_ctx =
        static_cast<platform::MKLDNNDeviceContext *>(pool.Get(place_));
    dev_ctx->ResetBlobMap();
    platform::MKLDNNDeviceContext::tls().set_cur_paddle_data_layout(
        paddle::framework::DataLayout::kNCHW);
  }
#endif
}

}  // namespace framework
}  // namespace paddle