/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/framework/executor.h"

#include "paddle/fluid/framework/channel.h"
#include "paddle/fluid/framework/feed_fetch_method.h"
#include "paddle/fluid/framework/lod_rank_table.h"
#include "paddle/fluid/framework/lod_tensor_array.h"
#include "paddle/fluid/framework/op_registry.h"
#include "paddle/fluid/framework/reader.h"
#include "paddle/fluid/platform/place.h"
#include "paddle/fluid/platform/profiler.h"

DECLARE_bool(benchmark);
DEFINE_bool(check_nan_inf, false,
            "Check whether operators produce NaN/Inf values. The check is "
            "extremely slow, so use this flag wisely.");
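// A gflags flag like this is typically enabled from the command line, e.g.
// (illustrative invocation): ./paddle_binary --check_nan_inf=true
// It can also be set in code before running: FLAGS_check_nan_inf = true;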

namespace paddle {
namespace framework {
namespace {
// Block ids start from 0. kProgramId (-1) represents the virtual code block
// that wraps the whole program, i.e. the parent of the first block 0.
int kProgramId = -1;
}  // namespace

ExecutorPrepareContext::ExecutorPrepareContext(
    const framework::ProgramDesc& prog, size_t block_id)
    : prog_(prog), block_id_(block_id) {}

ExecutorPrepareContext::~ExecutorPrepareContext() {
  VLOG(5) << "destroy ExecutorPrepareContext";
}

Executor::Executor(const platform::Place& place) : place_(place) {}

static void CreateTensor(Variable* var, proto::VarType::Type var_type) {
  if (var_type == proto::VarType::LOD_TENSOR) {
    var->GetMutable<LoDTensor>();
  } else if (var_type == proto::VarType::SELECTED_ROWS) {
    var->GetMutable<SelectedRows>();
  } else if (var_type == proto::VarType::FEED_MINIBATCH) {
    var->GetMutable<FeedFetchList>();
  } else if (var_type == proto::VarType::FETCH_LIST) {
    var->GetMutable<FeedFetchList>();
  } else if (var_type == proto::VarType::STEP_SCOPES) {
    var->GetMutable<std::vector<framework::Scope>>();
  } else if (var_type == proto::VarType::LOD_RANK_TABLE) {
    var->GetMutable<LoDRankTable>();
  } else if (var_type == proto::VarType::LOD_TENSOR_ARRAY) {
    var->GetMutable<LoDTensorArray>();
  } else if (var_type == proto::VarType::PLACE_LIST) {
    var->GetMutable<platform::PlaceList>();
  } else if (var_type == proto::VarType::READER) {
    var->GetMutable<ReaderHolder>();
  } else if (var_type == proto::VarType::CHANNEL) {
    var->GetMutable<ChannelHolder>();
  } else if (var_type == proto::VarType::RAW) {
    // GetMutable will be called by the operator itself.
  } else {
    PADDLE_THROW(
        "Variable type %d is not in "
        "[LOD_TENSOR, SELECTED_ROWS, FEED_MINIBATCH, FETCH_LIST, "
        "STEP_SCOPES, LOD_RANK_TABLE, LOD_TENSOR_ARRAY, PLACE_LIST, "
        "READER, CHANNEL, RAW]",
        var_type);
  }
}
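
// Illustrative usage (hypothetical variable name; not a call site in this
// file): after a variable is added to a scope, CreateTensor materializes its
// concrete payload from the proto type tag:
//   Variable* var = scope->Var("x");
//   CreateTensor(var, proto::VarType::LOD_TENSOR);
//   LoDTensor* t = var->GetMutable<LoDTensor>();  // the tensor created above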

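// Helper for FLAGS_check_nan_inf: inspects only float/double tensors that
// already own memory; tensors of other types or without an allocation are
// skipped.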
static void CheckTensorNANOrInf(const std::string& name,
                                const framework::Tensor& tensor) {
  if (tensor.memory_size() == 0) {
    return;
  }
  if (tensor.type().hash_code() != typeid(float).hash_code() &&
      tensor.type().hash_code() != typeid(double).hash_code()) {
    return;
  }
  PADDLE_ENFORCE(!framework::TensorContainsInf(tensor),
                 "Tensor %s contains Inf", name);
  PADDLE_ENFORCE(!framework::TensorContainsNAN(tensor),
                 "Tensor %s contains NAN", name);
}

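// Create every variable declared in the program's global block. When the
// scope has an ancestor, persistable variables are created in the outermost
// (root) scope so they survive this run, while non-persistable variables are
// created in the given local scope.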
void Executor::CreateVariables(const ProgramDesc& pdesc, Scope* scope) {
  auto& global_block = pdesc.Block(0);

  const Scope* ancestor_scope = scope;
  while (ancestor_scope->parent()) {
    ancestor_scope = ancestor_scope->parent();
  }

  if (ancestor_scope != scope) {
    for (auto& var : global_block.AllVars()) {
      if (var->Name() == framework::kEmptyVarName) {
        continue;
      }

      if (var->Persistable()) {
        auto* ptr = const_cast<Scope*>(ancestor_scope)->Var(var->Name());
        CreateTensor(ptr, var->GetType());
        VLOG(3) << "Create Variable " << var->Name()
                << " global, which pointer is " << ptr;
      } else {
        auto* ptr = scope->Var(var->Name());
        CreateTensor(ptr, var->GetType());
        VLOG(3) << "Create Variable " << var->Name()
                << " locally, which pointer is " << ptr;
      }
    }
  } else {
    for (auto& var : global_block.AllVars()) {
      auto* ptr = scope->Var(var->Name());
      CreateTensor(ptr, var->GetType());
      VLOG(3) << "Create variable " << var->Name() << ", which pointer is "
              << ptr;
    }
  }
}

void Executor::Run(const ProgramDesc& pdesc, Scope* scope, int block_id,
                   bool create_local_scope, bool create_vars) {
  platform::RecordBlock b(block_id);
  auto ctx = Prepare(pdesc, block_id);
  RunPreparedContext(ctx.get(), scope, create_local_scope, create_vars);
}
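
// Minimal usage sketch (illustrative; the trailing bools may be defaulted in
// the header declaration):
//   Executor exe(platform::CPUPlace());
//   exe.Run(program, &scope, /*block_id=*/0,
//           /*create_local_scope=*/true, /*create_vars=*/true);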

// Check whether the block already has feed operators and a feed_holder.
// Returns false if the block has no feed operators. If feed operators have
// been prepended to the block, verify that they match feed_targets and
// feed_holder_name, raising an exception on any mismatch. Returns true when
// the block has feed operators and a holder with matching info.
static bool has_feed_operators(
    const BlockDesc& block,
    std::map<std::string, const LoDTensor*>& feed_targets,
    const std::string& feed_holder_name) {
  size_t feed_count = 0;
  for (auto* op : block.AllOps()) {
    if (op->Type() == kFeedOpType) {
      feed_count++;
      PADDLE_ENFORCE_EQ(op->Input("X")[0], feed_holder_name,
                        "Input to feed op should be '%s'", feed_holder_name);
      std::string feed_target_name = op->Output("Out")[0];
      PADDLE_ENFORCE(
          feed_targets.find(feed_target_name) != feed_targets.end(),
          "Feed operator output name '%s' cannot be found in 'feed_targets'",
          feed_target_name);
    }
  }

  if (feed_count > 0) {
    PADDLE_ENFORCE_EQ(
        feed_count, feed_targets.size(),
        "The number of feed operators should match 'feed_targets'");

    // When feed operators are present, a feed_holder variable must exist.
    auto var = block.FindVar(feed_holder_name);
    PADDLE_ENFORCE_NOT_NULL(var, "Block should already have a '%s' variable",
                            feed_holder_name);
    PADDLE_ENFORCE_EQ(var->GetType(), proto::VarType::FEED_MINIBATCH,
                      "'%s' variable should be 'FEED_MINIBATCH' type",
                      feed_holder_name);
  }

  return feed_count > 0;
}

// Check whether the block already has fetch operators and a fetch_holder.
// Returns false if the block has no fetch operators. If fetch operators have
// been appended to the block, verify that they match fetch_targets and
// fetch_holder_name, raising an exception on any mismatch. Returns true when
// the block has fetch operators and a holder with matching info.
static bool has_fetch_operators(
    const BlockDesc& block, std::map<std::string, LoDTensor*>& fetch_targets,
    const std::string& fetch_holder_name) {
  size_t fetch_count = 0;
  for (auto* op : block.AllOps()) {
    if (op->Type() == kFetchOpType) {
      fetch_count++;
      PADDLE_ENFORCE_EQ(op->Output("Out")[0], fetch_holder_name,
                        "Output of fetch op should be '%s'", fetch_holder_name);
      std::string fetch_target_name = op->Input("X")[0];
      PADDLE_ENFORCE(
          fetch_targets.find(fetch_target_name) != fetch_targets.end(),
          "Fetch operator input name '%s' cannot be found in 'fetch_targets'",
          fetch_target_name);
    }
  }

  if (fetch_count > 0) {
    PADDLE_ENFORCE_EQ(
        fetch_count, fetch_targets.size(),
        "The number of fetch operators should match 'fetch_targets'");

    // When fetch operators are present, a fetch_holder variable must exist.
    auto var = block.FindVar(fetch_holder_name);
    PADDLE_ENFORCE_NOT_NULL(var, "Block should already have a '%s' variable",
                            fetch_holder_name);
    PADDLE_ENFORCE_EQ(var->GetType(), proto::VarType::FETCH_LIST,
                      "'%s' variable should be 'FETCH_LIST' type",
                      fetch_holder_name);
  }

  return fetch_count > 0;
}

void Executor::Run(const ProgramDesc& program, Scope* scope,
                   std::map<std::string, const LoDTensor*>& feed_targets,
                   std::map<std::string, LoDTensor*>& fetch_targets,
                   bool create_vars, const std::string& feed_holder_name,
                   const std::string& fetch_holder_name) {
  platform::RecordBlock b(kProgramId);
  bool has_feed_ops =
      has_feed_operators(program.Block(0), feed_targets, feed_holder_name);
  bool has_fetch_ops =
      has_fetch_operators(program.Block(0), fetch_targets, fetch_holder_name);

  ProgramDesc* copy_program = const_cast<ProgramDesc*>(&program);
  std::unique_ptr<ProgramDesc> unique_ptr_of_copy_program;
  if (!has_feed_ops || !has_fetch_ops) {
    // The copy must stay alive for the rest of this function; taking .get()
    // from a temporary unique_ptr would leave copy_program dangling.
    unique_ptr_of_copy_program.reset(new ProgramDesc(program));
    copy_program = unique_ptr_of_copy_program.get();
  }

  auto* global_block = copy_program->MutableBlock(0);

  if (!has_feed_ops) {
    // create feed_holder variable
    auto* feed_holder = global_block->Var(feed_holder_name);
    feed_holder->SetType(proto::VarType::FEED_MINIBATCH);
    feed_holder->SetPersistable(true);

    int i = 0;
    for (auto& feed_target : feed_targets) {
      std::string var_name = feed_target.first;
      VLOG(3) << "feed target's name: " << var_name;

      // prepend feed op
      auto* op = global_block->PrependOp();
      op->SetType(kFeedOpType);
      op->SetInput("X", {feed_holder_name});
      op->SetOutput("Out", {var_name});
      op->SetAttr("col", {static_cast<int>(i)});
      op->CheckAttrs();

      i++;
    }
  }

  // map the data of feed_targets to feed_holder
  for (auto* op : global_block->AllOps()) {
    if (op->Type() == kFeedOpType) {
      std::string feed_target_name = op->Output("Out")[0];
      int idx = boost::get<int>(op->GetAttr("col"));
      SetFeedVariable(scope, *feed_targets[feed_target_name], feed_holder_name,
                      idx);
    }
  }

  if (!has_fetch_ops) {
    // create fetch_holder variable
    auto* fetch_holder = global_block->Var(fetch_holder_name);
    fetch_holder->SetType(proto::VarType::FETCH_LIST);
    fetch_holder->SetPersistable(true);

    int i = 0;
    for (auto& fetch_target : fetch_targets) {
      std::string var_name = fetch_target.first;
      VLOG(3) << "fetch target's name: " << var_name;

      // append fetch op
      auto* op = global_block->AppendOp();
      op->SetType(kFetchOpType);
      op->SetInput("X", {var_name});
      op->SetOutput("Out", {fetch_holder_name});
      op->SetAttr("col", {static_cast<int>(i)});
      op->CheckAttrs();

      i++;
    }
  }

  Run(*copy_program, scope, 0, create_vars, create_vars);

  // obtain the data of fetch_targets from fetch_holder
  for (auto* op : global_block->AllOps()) {
    if (op->Type() == kFetchOpType) {
      std::string fetch_target_name = op->Input("X")[0];
      int idx = boost::get<int>(op->GetAttr("col"));
      *fetch_targets[fetch_target_name] =
          GetFetchVariable(*scope, fetch_holder_name, idx);
    }
  }
}
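
// Illustrative call of the feed/fetch overload (hypothetical tensor names
// "x" and "y"; "feed"/"fetch" are the conventional holder names):
//   std::map<std::string, const LoDTensor*> feeds{{"x", &x_tensor}};
//   std::map<std::string, LoDTensor*> fetches{{"y", &y_tensor}};
//   exe.Run(program, &scope, feeds, fetches, /*create_vars=*/true,
//           "feed", "fetch");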

std::unique_ptr<ExecutorPrepareContext> Executor::Prepare(
    const ProgramDesc& program, int block_id) {
  // Hold the context in a unique_ptr from the start so it is not leaked if
  // the enforce below throws.
  std::unique_ptr<ExecutorPrepareContext> ctx(
      new ExecutorPrepareContext(program, block_id));
  PADDLE_ENFORCE_LT(static_cast<size_t>(block_id), program.Size());
  auto& block = program.Block(block_id);
  for (auto& op_desc : block.AllOps()) {
    ctx->ops_.push_back(OpRegistry::CreateOp(*op_desc));
  }
  return ctx;
}
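
// The Prepare/RunPreparedContext split lets a caller build the operator list
// once and execute it many times, e.g. (sketch):
//   auto ctx = exe.Prepare(program, /*block_id=*/0);
//   for (int step = 0; step < num_steps; ++step) {
//     exe.RunPreparedContext(ctx.get(), &scope,
//                            /*create_local_scope=*/true,
//                            /*create_vars=*/true);
//   }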

void Executor::RunPreparedContext(ExecutorPrepareContext* ctx, Scope* scope,
                                  bool create_local_scope, bool create_vars) {
  Scope* local_scope = scope;
  if (create_vars) {
    if (create_local_scope) {
      local_scope = &scope->NewScope();
    }
    CreateVariables(ctx->prog_, local_scope);
  }

  for (auto& op : ctx->ops_) {
    VLOG(3) << place_ << " " << op->DebugStringEx(local_scope);
    op->Run(*local_scope, place_);

    if (FLAGS_benchmark) {
      VLOG(2) << "Memory used after operator " + op->Type() + " running: "
              << memory::memory_usage(place_);
    }
    if (FLAGS_check_nan_inf) {
      for (auto& vname : op->OutputVars(true)) {
        auto* var = local_scope->FindVar(vname);
        if (var == nullptr) continue;
        if (var->IsType<framework::LoDTensor>()) {
          CheckTensorNANOrInf(vname, var->Get<framework::LoDTensor>());
        }
      }
    }
  }
  if (create_vars && create_local_scope) {
    scope->DeleteScope(local_scope);
  }
  if (FLAGS_benchmark) {
    VLOG(2) << "-------------------------------------------------------";
    VLOG(2) << "Memory used after deleting local scope: "
            << memory::memory_usage(place_);
    VLOG(2) << "-------------------------------------------------------";
  }
}

}  // namespace framework
}  // namespace paddle