/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/framework/op_desc.h"
#include <algorithm>
#include <functional>
#include <iterator>
#include <mutex>  // NOLINT
#include <sstream>
#include <string>
#include <unordered_map>
#include "glog/logging.h"
#include "paddle/fluid/framework/block_desc.h"
#include "paddle/fluid/framework/op_proto_maker.h"
#include "paddle/fluid/framework/operator.h"
#include "paddle/fluid/framework/program_desc.h"
#include "paddle/fluid/framework/shape_inference.h"

namespace paddle {
namespace framework {

class OpDesc;
class BlockDesc;
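// Shape inference over the static program description: every query below is
// answered by looking variables up in a BlockDesc, so it can run at compile
// time, before any tensor actually exists.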
class CompileTimeInferShapeContext : public InferShapeContext {
 public:
  CompileTimeInferShapeContext(const OpDesc &op, const BlockDesc &block);

  bool HasInput(const std::string &name) const override;

  bool HasOutput(const std::string &name) const override;

  bool HasInputs(const std::string &name) const override;

  bool HasOutputs(const std::string &name) const override;

  AttrReader Attrs() const override;

  const std::vector<std::string> &Inputs(
      const std::string &name) const override;

  const std::vector<std::string> &Outputs(
      const std::string &name) const override;

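  // Copies the compile-time shape of input in[i] to output out[j]; the two
  // variables must already exist in the block and have the same type.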
  void ShareDim(const std::string &in, const std::string &out, size_t i = 0,
                size_t j = 0) override {
    PADDLE_ENFORCE_LT(i, Inputs(in).size());
    PADDLE_ENFORCE_LT(j, Outputs(out).size());
    const std::string &input_n = Inputs(in)[i];
    const std::string &output_n = Outputs(out)[j];

    PADDLE_ENFORCE(input_n != framework::kEmptyVarName, "The %s[%d] is @EMPTY@",
                   in, i);
    PADDLE_ENFORCE(output_n != framework::kEmptyVarName,
                   "The %s[%d] is @EMPTY@", out, j);

    auto *in_var = block_.FindVarRecursive(input_n);
    auto *out_var = block_.FindVarRecursive(output_n);

    PADDLE_ENFORCE(in_var->GetType() == out_var->GetType(),
                   "The type of %s and %s is not the same.", input_n, output_n);

    SetDim(output_n, GetDim(input_n));
  }

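  // Propagates the LoD level of input in[i] to output out[j]. Variables that
  // are neither LoDTensor nor LoDTensorArray are skipped, since LoD is
  // meaningless for them.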
  void ShareLoD(const std::string &in, const std::string &out, size_t i = 0,
                size_t j = 0) const override {
    PADDLE_ENFORCE_LT(i, Inputs(in).size());
    PADDLE_ENFORCE_LT(j, Outputs(out).size());
    PADDLE_ENFORCE(Inputs(in)[i] != framework::kEmptyVarName,
                   "The %s[%d] is @EMPTY@", in, i);
    PADDLE_ENFORCE(Outputs(out)[j] != framework::kEmptyVarName,
                   "The %s[%d] is @EMPTY@", out, j);
    auto *in_var = block_.FindVarRecursive(Inputs(in)[i]);
    auto *out_var = block_.FindVarRecursive(Outputs(out)[j]);
    if (in_var->GetType() != proto::VarType::LOD_TENSOR &&
        in_var->GetType() != proto::VarType::LOD_TENSOR_ARRAY) {
      VLOG(3) << "input " << in << " is not LodTensor or LodTensorArray.";
      return;
    }
    out_var->SetLoDLevel(in_var->GetLoDLevel());
  }

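  // Sets the output's LoD level to one less than the input's (used when an
  // op consumes one LoD level). The input must be a LoDTensor; the output
  // must be a LoDTensor or LoDTensorArray.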
  void DecreaseLoDLevel(const std::string &in, const std::string &out,
                        size_t i = 0, size_t j = 0) const override {
    PADDLE_ENFORCE_LT(i, Inputs(in).size());
    PADDLE_ENFORCE_LT(j, Outputs(out).size());
    PADDLE_ENFORCE(Inputs(in)[i] != framework::kEmptyVarName,
                   "The %s[%d] is @EMPTY@", in, i);
    PADDLE_ENFORCE(Outputs(out)[j] != framework::kEmptyVarName,
                   "The %s[%d] is @EMPTY@", out, j);
    auto *in_var = block_.FindVarRecursive(Inputs(in)[i]);
    auto *out_var = block_.FindVarRecursive(Outputs(out)[j]);
    PADDLE_ENFORCE(out_var->GetType() == proto::VarType::LOD_TENSOR_ARRAY ||
                       out_var->GetType() == proto::VarType::LOD_TENSOR,
                   "The input %s should be LodTensorArray or LodTensor.",
                   out_var->Name());
    PADDLE_ENFORCE(in_var->GetType() == proto::VarType::LOD_TENSOR,
                   "The input %s should be LodTensor.", in_var->Name());
    if (in_var->GetLoDLevel() > 0) {
      out_var->SetLoDLevel(in_var->GetLoDLevel() - 1);
    }
  }

  bool IsRuntime() const override;

 protected:
  proto::VarType::Type GetVarType(const std::string &name) const override;

  DDim GetDim(const std::string &name) const override;

  void SetDim(const std::string &name, const DDim &dim) override;

  std::vector<DDim> GetRepeatedDims(const std::string &name) const override;

  void SetRepeatedDims(const std::string &name,
                       const std::vector<DDim> &dims) override;

  InferShapeVarPtr GetVarPtr(const std::string &name) override;

  const OpDesc &op_;
  const BlockDesc &block_;
};

OpDesc::OpDesc(const std::string &type, const VariableNameMap &inputs,
               const VariableNameMap &outputs, const AttributeMap &attrs) {
  desc_.set_type(type);
  inputs_ = inputs;
  outputs_ = outputs;
  attrs_ = attrs;
  need_update_ = true;
}

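// A minimal construction sketch (op and variable names here are hypothetical,
// not taken from this file):
//
//   OpDesc op("mul", /*inputs=*/{{"X", {"x0"}}, {"Y", {"y0"}}},
//             /*outputs=*/{{"Out", {"out0"}}}, /*attrs=*/{});
//
// Like the constructors above, this leaves need_update_ set, so the proto
// form is only rebuilt on the next Flush().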
OpDesc::OpDesc(const OpDesc &other, BlockDesc *block) {
  CopyFrom(other);
  block_ = block;
  need_update_ = true;
}

void OpDesc::CopyFrom(const OpDesc &op_desc) {
  desc_.set_type(op_desc.Type());
  inputs_ = op_desc.inputs_;
  outputs_ = op_desc.outputs_;
  attrs_ = op_desc.attrs_;
  need_update_ = true;
}

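// Rebuilds the in-memory inputs_/outputs_/attrs_ maps from a serialized
// proto::OpDesc, e.g. when a ProgramDesc is deserialized.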
OpDesc::OpDesc(const proto::OpDesc &desc, BlockDesc *block)
    : desc_(desc), need_update_(false) {
  // restore inputs_
  int input_size = desc_.inputs_size();
  for (int i = 0; i < input_size; ++i) {
    const proto::OpDesc::Var &var = desc_.inputs(i);
    std::vector<std::string> &args = inputs_[var.parameter()];
    int argu_size = var.arguments_size();
    args.reserve(argu_size);
    for (int j = 0; j < argu_size; ++j) {
      args.push_back(var.arguments(j));
    }
  }
  // restore outputs_
  int output_size = desc_.outputs_size();
  for (int i = 0; i < output_size; ++i) {
    const proto::OpDesc::Var &var = desc_.outputs(i);
    std::vector<std::string> &args = outputs_[var.parameter()];
    int argu_size = var.arguments_size();
    args.reserve(argu_size);
    for (int j = 0; j < argu_size; ++j) {
      args.push_back(var.arguments(j));
    }
  }
  // restore attrs_
  for (const proto::OpDesc::Attr &attr : desc_.attrs()) {
    std::string attr_name = attr.name();
    // The sub_block referred to by the BLOCK attr hasn't been added
    // to the ProgramDesc yet, so we skip setting BLOCK/BLOCKS attrs here.
    if (attr.type() != proto::AttrType::BLOCK &&
        attr.type() != proto::AttrType::BLOCKS) {
      attrs_[attr_name] = GetAttrValue(attr);
    }
  }
  this->block_ = block;
}

proto::OpDesc *OpDesc::Proto() {
  Flush();
  return &desc_;
}

const std::vector<std::string> &OpDesc::Input(const std::string &name) const {
  auto it = inputs_.find(name);
  PADDLE_ENFORCE(it != inputs_.end(), "Input %s cannot be found in Op %s", name,
                 Type());
  return it->second;
}

std::vector<std::string> OpDesc::InputArgumentNames() const {
  std::vector<std::string> retv;
  for (auto &ipt : this->inputs_) {
    retv.insert(retv.end(), ipt.second.begin(), ipt.second.end());
  }
  return retv;
}

void OpDesc::SetInput(const std::string &param_name,
                      const std::vector<std::string> &args) {
  need_update_ = true;
  inputs_[param_name] = args;
}

const std::vector<std::string> &OpDesc::Output(const std::string &name) const {
  auto it = outputs_.find(name);
  PADDLE_ENFORCE(it != outputs_.end(), "Output %s cannot be found in Op %s",
                 name, Type());
  return it->second;
}

std::vector<std::string> OpDesc::OutputArgumentNames() const {
  std::vector<std::string> retv;
  for (auto &ipt : this->outputs_) {
    retv.insert(retv.end(), ipt.second.begin(), ipt.second.end());
  }
  return retv;
}

void OpDesc::SetOutput(const std::string &param_name,
                       const std::vector<std::string> &args) {
  need_update_ = true;
  this->outputs_[param_name] = args;
}

bool OpDesc::HasProtoAttr(const std::string &name) const {
  auto &op_info = OpInfoMap::Instance();
  if (op_info.Has(desc_.type())) {
    auto op_info_ptr = op_info.Get(desc_.type());
    if (op_info_ptr.HasOpProtoAndChecker()) {
      const proto::OpProto &proto = op_info_ptr.Proto();
      for (int i = 0; i != proto.attrs_size(); ++i) {
        const proto::OpProto::Attr &attr = proto.attrs(i);
        if (attr.name() == name) {
          return true;
        }
      }
    }
  }
  return false;
}

proto::AttrType OpDesc::GetAttrType(const std::string &name) const {
  auto it = attrs_.find(name);
  PADDLE_ENFORCE(it != attrs_.end(), "Attribute %s is not found", name);
  return static_cast<proto::AttrType>(it->second.which() - 1);
}

std::vector<std::string> OpDesc::AttrNames() const {
  std::vector<std::string> retv;
  retv.reserve(attrs_.size());
  for (auto &attr : attrs_) {
    retv.push_back(attr.first);
  }
  return retv;
}

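// Sets attribute `name` to `v`. An empty Python list reaches this point as
// std::vector<int> (see the NOTICE below), so the value may first have to be
// coerced to the attribute's declared proto type.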
void OpDesc::SetAttr(const std::string &name, const Attribute &v) {
  // NOTICE(minqiyang): pybind11 passes an empty list in Python as
  // std::vector<int> in C++, so we have to correct the attribute's type
  // here when that happens.
  proto::AttrType attr_type = static_cast<proto::AttrType>(v.which() - 1);
  if (attr_type == proto::AttrType::INTS &&
      boost::get<std::vector<int>>(v).size() == 0u) {
    // Find current attr via attr name and set the correct attribute value
    const proto::OpProto::Attr &attr = GetProtoAttr(name);
    switch (attr.type()) {
      case proto::AttrType::BOOLEANS: {
        VLOG(11) << "SetAttr: " << Type() << ", " << name
                 << " from INTS to BOOLEANS";
        this->attrs_[name] = std::vector<bool>();
        break;
      }
      case proto::AttrType::INTS: {
        VLOG(11) << "SetAttr: " << Type() << ", " << name
                 << " from INTS to INTS";
        this->attrs_[name] = std::vector<int>();
        break;
      }
      case proto::AttrType::LONGS: {
        VLOG(11) << "SetAttr: " << Type() << ", " << name
                 << " from INTS to LONGS";
        this->attrs_[name] = std::vector<int64_t>();
        break;
      }
      case proto::AttrType::FLOATS: {
        VLOG(11) << "SetAttr: " << Type() << ", " << name
                 << " from INTS to FLOATS";
        this->attrs_[name] = std::vector<float>();
        break;
      }
      case proto::AttrType::STRINGS: {
        VLOG(11) << "SetAttr: " << Type() << ", " << name
                 << " from INTS to STRINGS";
        this->attrs_[name] = std::vector<std::string>();
        break;
      }
      case proto::AttrType::BLOCKS: {
        VLOG(11) << "SetAttr: " << Type() << ", " << name
                 << " from INTS to BLOCKS";
        this->SetBlocksAttr(name, std::vector<BlockDesc *>());
        return;
      }
      default:
        PADDLE_THROW("Wrong attr type %d", attr.type());
    }
    need_update_ = true;
    return;
  }

  this->attrs_[name] = v;
  need_update_ = true;
}

void OpDesc::SetBlockAttr(const std::string &name, BlockDesc *block) {
  this->attrs_[name] = block;
  need_update_ = true;
}

void OpDesc::SetBlocksAttr(const std::string &name,
                           std::vector<BlockDesc *> blocks) {
  this->attrs_[name] = blocks;
  need_update_ = true;
}

void OpDesc::SetAttrMap(
    const std::unordered_map<std::string, Attribute> &attr_map) {
  attrs_ = attr_map;
  need_update_ = true;
}

Attribute OpDesc::GetAttr(const std::string &name) const {
  auto it = attrs_.find(name);
  PADDLE_ENFORCE(it != attrs_.end(), "Attribute %s is not found", name);
  return it->second;
}

const proto::OpProto::Attr &OpDesc::GetProtoAttr(
    const std::string &name) const {
  const proto::OpProto &proto = OpInfoMap::Instance().Get(Type()).Proto();
  for (int i = 0; i != proto.attrs_size(); ++i) {
    const proto::OpProto::Attr &attr = proto.attrs(i);
    if (attr.name() == name) {
      return attr;
    }
  }

  PADDLE_THROW("Attribute %s is not found in proto %s", name, proto.type());
}

Attribute OpDesc::GetNullableAttr(const std::string &name) const {
  auto it = attrs_.find(name);
  if (it != attrs_.end()) {
    return it->second;
  } else {
    return Attribute();
  }
}

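// Block attributes are held in attrs_ as BlockDesc pointers; these helpers
// expose them as block indices, which is the form the serialized proto stores.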
std::vector<int> OpDesc::GetBlocksAttrIds(const std::string &name) const {
  auto it = attrs_.find(name);
  PADDLE_ENFORCE(it != attrs_.end(), "Attribute %s is not found", name);
  auto blocks = boost::get<std::vector<BlockDesc *>>(it->second);

  std::vector<int> ids;
  for (auto n : blocks) {
    ids.push_back(n->ID());
  }

  return ids;
}

int OpDesc::GetBlockAttrId(const std::string &name) const {
  auto it = attrs_.find(name);
  PADDLE_ENFORCE(it != attrs_.end(), "Attribute %s is not found", name);
  return boost::get<BlockDesc *>(it->second)->ID();
}

const std::unordered_map<std::string, Attribute> &OpDesc::GetAttrMap() const {
  return attrs_;
}

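// Renames a variable wherever this op refers to it: in the input map, the
// output map, and the op-role-var attribute, all of which store variable
// names.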
void OpDesc::Rename(const std::string &old_name, const std::string &new_name) {
  RenameInput(old_name, new_name);
  RenameOutput(old_name, new_name);
  need_update_ = true;
}

void OpDesc::RenameOutput(const std::string &old_name,
                          const std::string &new_name) {
  for (auto &output : outputs_) {
    std::replace(output.second.begin(), output.second.end(), old_name,
                 new_name);
  }

  auto it = attrs_.find(framework::OpProtoAndCheckerMaker::OpRoleVarAttrName());
  if (it != attrs_.end()) {
    auto &op_vars = boost::get<std::vector<std::string>>(it->second);
    std::replace(op_vars.begin(), op_vars.end(), old_name, new_name);
  }

  need_update_ = true;
}

void OpDesc::RenameInput(const std::string &old_name,
                         const std::string &new_name) {
  for (auto &input : inputs_) {
    std::replace(input.second.begin(), input.second.end(), old_name, new_name);
  }

  auto it = attrs_.find(framework::OpProtoAndCheckerMaker::OpRoleVarAttrName());
  if (it != attrs_.end()) {
    auto &op_vars = boost::get<std::vector<std::string>>(it->second);
    std::replace(op_vars.begin(), op_vars.end(), old_name, new_name);
  }

  need_update_ = true;
}

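// Serializes one Attribute variant into the matching field of a
// proto::OpDesc::Attr; dispatched from Flush() via boost::apply_visitor.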
struct SetAttrDescVisitor : public boost::static_visitor<void> {
  explicit SetAttrDescVisitor(proto::OpDesc::Attr *attr) : attr_(attr) {}
  mutable proto::OpDesc::Attr *attr_;
  void operator()(int v) const { attr_->set_i(v); }
  void operator()(float v) const { attr_->set_f(v); }
  void operator()(const std::string &v) const { attr_->set_s(v); }

  // Please refer to https://github.com/PaddlePaddle/Paddle/issues/7162
  template <class T,
            class = typename std::enable_if<std::is_same<bool, T>::value>::type>
  void operator()(T b) const {
    attr_->set_b(b);
  }

  void operator()(const std::vector<int> &v) const {
    VectorToRepeated(v, attr_->mutable_ints());
  }
  void operator()(const std::vector<float> &v) const {
    VectorToRepeated(v, attr_->mutable_floats());
  }
  void operator()(const std::vector<std::string> &v) const {
    VectorToRepeated(v, attr_->mutable_strings());
  }
  void operator()(const std::vector<bool> &v) const {
    VectorToRepeated(v, attr_->mutable_bools());
  }
  void operator()(const std::vector<BlockDesc *> &v) const {
    std::vector<int> blocks_idx;
    for (auto blk : v) {
      blocks_idx.push_back(blk->ID());
    }
    VectorToRepeated(blocks_idx, attr_->mutable_blocks_idx());
  }

  void operator()(BlockDesc *desc) const { attr_->set_block_idx(desc->ID()); }

  void operator()(int64_t v) const { attr_->set_l(v); }

  void operator()(const std::vector<int64_t> &v) const {
    VectorToRepeated(v, attr_->mutable_longs());
  }

  void operator()(boost::blank) const { PADDLE_THROW("Unexpected branch"); }
};

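// Re-serializes the in-memory maps into desc_ when need_update_ is set.
// Proto() calls this before handing out the underlying proto.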
void OpDesc::Flush() {
  if (need_update_) {
    this->desc_.mutable_inputs()->Clear();
    for (auto &ipt : inputs_) {
      auto *input = desc_.add_inputs();
      input->set_parameter(ipt.first);
      VectorToRepeated(ipt.second, input->mutable_arguments());
    }

    this->desc_.mutable_outputs()->Clear();
    for (auto &opt : outputs_) {
      auto *output = desc_.add_outputs();
      output->set_parameter(opt.first);
      VectorToRepeated(opt.second, output->mutable_arguments());
    }

    this->desc_.mutable_attrs()->Clear();
    for (auto &attr : attrs_) {
      auto *attr_desc = desc_.add_attrs();
      attr_desc->set_name(attr.first);
      attr_desc->set_type(
          static_cast<proto::AttrType>(attr.second.which() - 1));
      SetAttrDescVisitor visitor(attr_desc);
      boost::apply_visitor(visitor, attr.second);
    }

    need_update_ = false;
  }
}

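// Lazily installs an infer_shape_ hook for every operator that has registered
// kernels, forwarding to OperatorWithKernel::InferShape unless the operator
// already supplied its own shape-inference function.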
static std::once_flag init_infer_shape_funcs;

static void InitInferShapeFuncs() {
  std::call_once(init_infer_shape_funcs, [] {
    auto &map = OpInfoMap::Instance();
    auto &info_map = *map.mutable_map();

    for (auto &kern_pair : OperatorWithKernel::AllOpKernels()) {
      auto op_type = kern_pair.first;
      auto it = info_map.find(op_type);
      PADDLE_ENFORCE(it != info_map.end(), "%s has not been registered",
                     op_type);
      auto &op_info = it->second;
      auto op = static_cast<OperatorWithKernel *>(op_info.Creator()(
          "", VariableNameMap{}, VariableNameMap{}, AttributeMap{}));
      if (op_info.infer_shape_) {  // infer_shape has been registered.
        continue;
      }
      op_info.infer_shape_ = [op](InferShapeContext *ctx) {
        op->InferShape(ctx);
      };
    }
  });
}

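// Validates attrs_ against the operator's registered attribute checker.
// Operators generated internally by Paddle may have no checker; they are
// skipped.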
void OpDesc::CheckAttrs() {
  PADDLE_ENFORCE(!Type().empty(),
                 "CheckAttrs() cannot be called before the op type is set.");
  auto *checker = OpInfoMap::Instance().Get(Type()).Checker();
  if (checker == nullptr) {
    // checker is not configured. That operator could be generated by Paddle,
    // not by users.
    return;
  }
  checker->Check(attrs_);
}

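// Runs compile-time shape inference for this op against the given block,
// through the infer_shape_ hook installed by InitInferShapeFuncs().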
void OpDesc::InferShape(const BlockDesc &block) const {
  VLOG(3) << "CompileTime infer shape on " << Type();
  InitInferShapeFuncs();
  auto &infer_shape = OpInfoMap::Instance().Get(this->Type()).infer_shape_;
  PADDLE_ENFORCE(static_cast<bool>(infer_shape),
                 "%s's infer_shape has not been registered", this->Type());
  CompileTimeInferShapeContext ctx(*this, block);
  if (VLOG_IS_ON(10)) {
    std::ostringstream sout;
    auto inames = this->InputArgumentNames();
    sout << " From [";
    std::copy(inames.begin(), inames.end(),
              std::ostream_iterator<std::string>(sout, ", "));
    sout << "] to [";
    auto onames = this->OutputArgumentNames();
    std::copy(onames.begin(), onames.end(),
              std::ostream_iterator<std::string>(sout, ", "));
    sout << "]";
    VLOG(10) << sout.str();
  }
  infer_shape(&ctx);
}

void OpDesc::InferVarType(BlockDesc *block) const {
  // There are a few places where the var type can be set.
  // When a VarDesc is created, it is set to LOD_TENSOR by default.
  // When an output variable is created, it is likewise set to LOD_TENSOR
  // by default. We limit this to be the only place where an operator
  // defines its customized var type inference, so we don't do any
  // "default" setting here.
  auto &info = OpInfoMap::Instance().Get(this->Type());
  if (info.infer_var_type_) {
    info.infer_var_type_(*this, block);
  }
}

CompileTimeInferShapeContext::CompileTimeInferShapeContext(
    const OpDesc &op, const BlockDesc &block)
    : op_(op), block_(block) {}

bool CompileTimeInferShapeContext::HasInput(const std::string &name) const {
  const std::vector<std::string> &input_names = op_.Input(name);
  auto length = input_names.size();
  if (length == 0) {
    return false;
  }
  PADDLE_ENFORCE_EQ(length, 1UL,
                    "Input(%s) should have only one value, "
                    "but it have %d now",
                    name, length);
  return block_.HasVarRecursive(input_names[0]);
}

bool CompileTimeInferShapeContext::HasOutput(const std::string &name) const {
  const std::vector<std::string> &output_names = op_.Output(name);
  auto length = output_names.size();
  if (length == 0) {
    return false;
  }
  PADDLE_ENFORCE_EQ(length, 1UL,
                    "Output(%s) should have only one value, "
                    "but it have %d now",
                    name, length);
  return block_.HasVarRecursive(output_names[0]);
}

bool CompileTimeInferShapeContext::HasInputs(const std::string &name) const {
  const std::vector<std::string> &input_names = op_.Input(name);
  if (input_names.empty()) {
    return false;
  }
  for (auto &input : input_names) {
    if (!block_.HasVarRecursive(input)) return false;
  }
  return true;
}

bool CompileTimeInferShapeContext::HasOutputs(const std::string &name) const {
  const std::vector<std::string> &output_names = op_.Output(name);
  if (output_names.empty()) {
    return false;
  }
  for (auto &output : output_names) {
    if (!block_.HasVarRecursive(output)) return false;
  }
  return true;
}

AttrReader CompileTimeInferShapeContext::Attrs() const {
  return AttrReader(op_.GetAttrMap());
}

const std::vector<std::string> &CompileTimeInferShapeContext::Inputs(
    const std::string &name) const {
  return op_.Input(name);
}

const std::vector<std::string> &CompileTimeInferShapeContext::Outputs(
    const std::string &name) const {
  return op_.Output(name);
}

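// Compile-time dims come from VarDesc::GetShape(); an empty shape is mapped
// to the placeholder ddim {0}.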
DDim CompileTimeInferShapeContext::GetDim(const std::string &name) const {
  auto var = block_.FindVarRecursive(name);
  PADDLE_ENFORCE(var != nullptr, "Cannot find variable %s", name);
  DDim res;
  try {
    auto shape = var->GetShape();
    res = shape.empty() ? make_ddim({0UL}) : make_ddim(shape);
  } catch (...) {
    VLOG(5) << "GetDim of variable " << name << " error";
    std::rethrow_exception(std::current_exception());
  }
  return res;
}

std::vector<DDim> CompileTimeInferShapeContext::GetRepeatedDims(
    const std::string &name) const {
  auto var = block_.FindVarRecursive(name);
  PADDLE_ENFORCE(var != nullptr, "Cannot find variable %s", name);
  std::vector<DDim> res;
  try {
    auto shapes = var->GetShapes();
    for (const auto &s : shapes) {
      res.push_back(s.empty() ? make_ddim({0UL}) : make_ddim(s));
    }
  } catch (...) {
    VLOG(5) << "GetRepeatedDim of variable " << name << " error.";
    std::rethrow_exception(std::current_exception());
  }
  return res;
}

void CompileTimeInferShapeContext::SetDim(const std::string &name,
                                          const DDim &dim) {
  block_.FindVarRecursive(name)->SetShape(vectorize(dim));
}

void CompileTimeInferShapeContext::SetRepeatedDims(
    const std::string &name, const std::vector<DDim> &dims) {
  auto var = block_.FindVarRecursive(name);
  PADDLE_ENFORCE(var != nullptr, "Cannot find variable %s", name);
  std::vector<std::vector<int64_t>> dim_vec(dims.size());
  std::transform(dims.begin(), dims.end(), dim_vec.begin(), vectorize);
  var->SetShapes(dim_vec);
}

bool CompileTimeInferShapeContext::IsRuntime() const { return false; }

proto::VarType::Type CompileTimeInferShapeContext::GetVarType(
    const std::string &name) const {
  return block_.FindVarRecursive(name)->GetType();
}

InferShapeVarPtr CompileTimeInferShapeContext::GetVarPtr(
    const std::string &name) {
  return block_.FindVarRecursive(name);
}

}  // namespace framework
}  // namespace paddle