/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/framework/op_desc.h"
#include <algorithm>
#include <functional>
#include <mutex>  // NOLINT
#include <string>
#include <unordered_map>
#include "glog/logging.h"
#include "paddle/fluid/framework/block_desc.h"
#include "paddle/fluid/framework/op_proto_maker.h"
#include "paddle/fluid/framework/operator.h"
#include "paddle/fluid/framework/program_desc.h"
#include "paddle/fluid/framework/shape_inference.h"

namespace paddle {
namespace framework {

class OpDesc;
class BlockDesc;
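// Compile-time counterpart of InferShapeContext: shapes, variable types and
// LoD levels are read from and written to the VarDescs stored in a BlockDesc,
// rather than to runtime Variables.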
class CompileTimeInferShapeContext : public InferShapeContext {
 public:
  CompileTimeInferShapeContext(const OpDesc &op, const BlockDesc &block);

  bool HasInput(const std::string &name) const override;

  bool HasOutput(const std::string &name) const override;

  bool HasInputs(const std::string &name) const override;

  bool HasOutputs(const std::string &name) const override;

  AttrReader Attrs() const override;

  const std::vector<std::string> &Inputs(
      const std::string &name) const override;

  const std::vector<std::string> &Outputs(
      const std::string &name) const override;

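  // ShareDim copies the compile-time shape of the i-th argument of input
  // `in` to the j-th argument of output `out` (and checks that both variables
  // have the same type); ShareLoD below does the same for the LoD level.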
  void ShareDim(const std::string &in, const std::string &out, size_t i = 0,
                size_t j = 0) override {
    PADDLE_ENFORCE_LT(i, Inputs(in).size());
    PADDLE_ENFORCE_LT(j, Outputs(out).size());
    const std::string &input_n = Inputs(in)[i];
    const std::string &output_n = Outputs(out)[j];

    PADDLE_ENFORCE(input_n != framework::kEmptyVarName, "The %s[%d] is @EMPTY@",
                   in, i);
    PADDLE_ENFORCE(output_n != framework::kEmptyVarName,
                   "The %s[%d] is @EMPTY@", out, j);

    auto *in_var = block_.FindVarRecursive(input_n);
    auto *out_var = block_.FindVarRecursive(output_n);

    PADDLE_ENFORCE(in_var->GetType() == out_var->GetType(),
                   "The type of %s and %s is not the same.", input_n, output_n);

    SetDim(output_n, GetDim(input_n));
  }

  void ShareLoD(const std::string &in, const std::string &out, size_t i = 0,
                size_t j = 0) const override {
    PADDLE_ENFORCE_LT(i, Inputs(in).size());
    PADDLE_ENFORCE_LT(j, Outputs(out).size());
    PADDLE_ENFORCE(Inputs(in)[i] != framework::kEmptyVarName,
                   "The %s[%d] is @EMPTY@", in, i);
    PADDLE_ENFORCE(Outputs(out)[j] != framework::kEmptyVarName,
                   "The %s[%d] is @EMPTY@", out, j);
    auto *in_var = block_.FindVarRecursive(Inputs(in)[i]);
    auto *out_var = block_.FindVarRecursive(Outputs(out)[j]);
    if (in_var->GetType() != proto::VarType::LOD_TENSOR &&
        in_var->GetType() != proto::VarType::LOD_TENSOR_ARRAY) {
      VLOG(3) << "input " << in << " is not LodTensor or LodTensorArray.";
      return;
    }
    out_var->SetLoDLevel(in_var->GetLoDLevel());
  }

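  // Sets the output's compile-time LoD level to the input's level minus one
  // (only when the input's level is positive).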
  void DecreaseLoDLevel(const std::string &in, const std::string &out,
                        size_t i = 0, size_t j = 0) const override {
    PADDLE_ENFORCE_LT(i, Inputs(in).size());
    PADDLE_ENFORCE_LT(j, Outputs(out).size());
    PADDLE_ENFORCE(Inputs(in)[i] != framework::kEmptyVarName,
                   "The %s[%d] is @EMPTY@", in, i);
    PADDLE_ENFORCE(Outputs(out)[j] != framework::kEmptyVarName,
                   "The %s[%d] is @EMPTY@", out, j);
    auto *in_var = block_.FindVarRecursive(Inputs(in)[i]);
    auto *out_var = block_.FindVarRecursive(Outputs(out)[j]);
    PADDLE_ENFORCE(out_var->GetType() == proto::VarType::LOD_TENSOR_ARRAY ||
                       out_var->GetType() == proto::VarType::LOD_TENSOR,
                   "The output %s should be LoDTensorArray or LoDTensor.",
                   out_var->Name());
    PADDLE_ENFORCE(in_var->GetType() == proto::VarType::LOD_TENSOR,
                   "The input %s should be LoDTensor.", in_var->Name());
    if (in_var->GetLoDLevel() > 0) {
      out_var->SetLoDLevel(in_var->GetLoDLevel() - 1);
    }
  }

  std::vector<InferShapeVarPtr> GetInputVarPtrs(
      const std::string &name) override {
    const std::vector<std::string> arg_names = Inputs(name);
    std::vector<InferShapeVarPtr> res;
    res.reserve(arg_names.size());
    std::transform(arg_names.begin(), arg_names.end(), std::back_inserter(res),
                   [this](const std::string &name) {
                     return block_.FindVarRecursive(name);
                   });
    return res;
  }

  std::vector<InferShapeVarPtr> GetOutputVarPtrs(
      const std::string &name) override {
    const std::vector<std::string> arg_names = Outputs(name);
    std::vector<InferShapeVarPtr> res;
    res.reserve(arg_names.size());
    std::transform(arg_names.begin(), arg_names.end(), std::back_inserter(res),
                   [this](const std::string &name) {
                     return block_.FindVarRecursive(name);
                   });
    return res;
  }

  DDim GetInputDim(const std::string &name) const override {
    const std::vector<std::string> &arg_names = Inputs(name);
    PADDLE_ENFORCE_EQ(arg_names.size(), 1UL,
                      "Input(%s) should hold one element, but now it holds %d",
                      name, arg_names.size());
    return this->GetDim(arg_names[0]);
  }

  std::vector<DDim> GetInputsDim(const std::string &name) const override {
    const std::vector<std::string> &arg_names = Inputs(name);
    return GetDims(arg_names);
  }

  bool IsRuntime() const override;

  std::vector<proto::VarType::Type> GetInputsVarType(
      const std::string &name) const override {
    return GetVarTypes(Inputs(name));
  }

  std::vector<proto::VarType::Type> GetOutputsVarType(
      const std::string &name) const override {
    return GetVarTypes(Outputs(name));
  }

 protected:
  std::vector<proto::VarType::Type> GetVarTypes(
      const std::vector<std::string> &names) const {
    std::vector<proto::VarType::Type> retv;
    retv.resize(names.size());
    std::transform(
        names.begin(), names.end(), retv.begin(),
        std::bind(std::mem_fn(&CompileTimeInferShapeContext::GetVarType), this,
                  std::placeholders::_1));
    return retv;
  }

  proto::VarType::Type GetVarType(const std::string &name) const;

  DDim GetDim(const std::string &name) const {
    auto var = block_.FindVarRecursive(name);
    PADDLE_ENFORCE(var != nullptr, "Cannot find variable %s", name);
    DDim res;
    try {
      auto shape = var->GetShape();
      res = shape.empty() ? make_ddim({0UL}) : make_ddim(shape);
    } catch (...) {
      VLOG(5) << "GetDim of variable " << name << " error";
      std::rethrow_exception(std::current_exception());
    }
    return res;
  }

  std::vector<DDim> GetDims(const std::vector<std::string> &names) const {
    std::vector<DDim> ret;
    ret.reserve(names.size());
    std::transform(
        names.begin(), names.end(), std::back_inserter(ret),
        [this](const std::string &name) { return this->GetDim(name); });
    return ret;
  }

  void SetDim(const std::string &name, const DDim &dim) override;

  std::vector<DDim> GetRepeatedDims(const std::string &name) const override;

  void SetRepeatedDims(const std::string &name,
                       const std::vector<DDim> &dims) override;

  const OpDesc &op_;
  const BlockDesc &block_;
};

OpDesc::OpDesc(const std::string &type, const VariableNameMap &inputs,
               const VariableNameMap &outputs, const AttributeMap &attrs) {
  desc_.set_type(type);
  inputs_ = inputs;
  outputs_ = outputs;
  attrs_ = attrs;
  need_update_ = true;
}

OpDesc::OpDesc(const OpDesc &other, BlockDesc *block) {
  CopyFrom(other);
  block_ = block;
  need_update_ = true;
}

void OpDesc::CopyFrom(const OpDesc &op_desc) {
  desc_.set_type(op_desc.Type());
  inputs_ = op_desc.inputs_;
  outputs_ = op_desc.outputs_;
  attrs_ = op_desc.attrs_;
  need_update_ = true;
}

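// Rebuilds the in-memory inputs_/outputs_/attrs_ maps from a serialized
// proto::OpDesc.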
OpDesc::OpDesc(const proto::OpDesc &desc, BlockDesc *block)
    : desc_(desc), need_update_(false) {
  // restore inputs_
  int input_size = desc_.inputs_size();
  for (int i = 0; i < input_size; ++i) {
    const proto::OpDesc::Var &var = desc_.inputs(i);
    std::vector<std::string> &args = inputs_[var.parameter()];
    int argu_size = var.arguments_size();
    args.reserve(argu_size);
    for (int j = 0; j < argu_size; ++j) {
      args.push_back(var.arguments(j));
    }
  }
  // restore outputs_
  int output_size = desc_.outputs_size();
  for (int i = 0; i < output_size; ++i) {
    const proto::OpDesc::Var &var = desc_.outputs(i);
    std::vector<std::string> &args = outputs_[var.parameter()];
    int argu_size = var.arguments_size();
    args.reserve(argu_size);
    for (int j = 0; j < argu_size; ++j) {
      args.push_back(var.arguments(j));
    }
  }
  // restore attrs_
  for (const proto::OpDesc::Attr &attr : desc_.attrs()) {
    std::string attr_name = attr.name();
    // The sub_block referred to by the BLOCK attr hasn't been added to the
    // ProgramDesc yet, so we skip setting BLOCK/BLOCKS attrs here.
    if (attr.type() != proto::AttrType::BLOCK &&
        attr.type() != proto::AttrType::BLOCKS) {
      attrs_[attr_name] = GetAttrValue(attr);
    }
  }
  this->block_ = block;
}

proto::OpDesc *OpDesc::Proto() {
  Flush();
  return &desc_;
}

const std::vector<std::string> &OpDesc::Input(const std::string &name) const {
  auto it = inputs_.find(name);
  PADDLE_ENFORCE(it != inputs_.end(), "Input %s cannot be found in Op %s", name,
                 Type());
  return it->second;
}

std::vector<std::string> OpDesc::InputArgumentNames() const {
  std::vector<std::string> retv;
  for (auto &ipt : this->inputs_) {
    retv.insert(retv.end(), ipt.second.begin(), ipt.second.end());
  }
  return retv;
}

void OpDesc::SetInput(const std::string &param_name,
                      const std::vector<std::string> &args) {
  need_update_ = true;
  inputs_[param_name] = args;
}

const std::vector<std::string> &OpDesc::Output(const std::string &name) const {
  auto it = outputs_.find(name);
  PADDLE_ENFORCE(it != outputs_.end(), "Output %s cannot be found in Op %s",
                 name, Type());
  return it->second;
}

std::vector<std::string> OpDesc::OutputArgumentNames() const {
  std::vector<std::string> retv;
  for (auto &ipt : this->outputs_) {
    retv.insert(retv.end(), ipt.second.begin(), ipt.second.end());
  }
  return retv;
}

void OpDesc::SetOutput(const std::string &param_name,
                       const std::vector<std::string> &args) {
  need_update_ = true;
  this->outputs_[param_name] = args;
}

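// Returns true if `name` is declared as an attribute in the operator's
// registered OpProto, regardless of whether it has been set on this OpDesc.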
bool OpDesc::HasProtoAttr(const std::string &name) const {
  auto &op_info = OpInfoMap::Instance();
  if (op_info.Has(desc_.type())) {
    auto op_info_ptr = op_info.Get(desc_.type());
    if (op_info_ptr.HasOpProtoAndChecker()) {
      const proto::OpProto &proto = op_info_ptr.Proto();
      for (int i = 0; i != proto.attrs_size(); ++i) {
        const proto::OpProto::Attr &attr = proto.attrs(i);
        if (attr.name() == name) {
          return true;
        }
      }
    }
  }
  return false;
}

proto::AttrType OpDesc::GetAttrType(const std::string &name) const {
  auto it = attrs_.find(name);
  PADDLE_ENFORCE(it != attrs_.end(), "Attribute %s is not found", name);
  return static_cast<proto::AttrType>(it->second.which() - 1);
}

std::vector<std::string> OpDesc::AttrNames() const {
  std::vector<std::string> retv;
  retv.reserve(attrs_.size());
  for (auto &attr : attrs_) {
    retv.push_back(attr.first);
  }
  return retv;
}

void OpDesc::SetAttr(const std::string &name, const Attribute &v) {
  // NOTICE(minqiyang): pybind11 passes an empty Python list to C++ as
  // std::vector<int>, so we have to correct the attribute's type here
  // when that happens.
  proto::AttrType attr_type = static_cast<proto::AttrType>(v.which() - 1);
  if (attr_type == proto::AttrType::INTS &&
      boost::get<std::vector<int>>(v).size() == 0u) {
    // Find current attr via attr name and set the correct attribute value
    const proto::OpProto::Attr &attr = GetProtoAttr(name);
    switch (attr.type()) {
      case proto::AttrType::BOOLEANS: {
        VLOG(11) << "SetAttr: " << Type() << ", " << name
                 << " from INTS to BOOLEANS";
        this->attrs_[name] = std::vector<bool>();
        break;
      }
      case proto::AttrType::INTS: {
        VLOG(11) << "SetAttr: " << Type() << ", " << name
                 << " from INTS to INTS";
        this->attrs_[name] = std::vector<int>();
        break;
      }
      case proto::AttrType::LONGS: {
        VLOG(11) << "SetAttr: " << Type() << ", " << name
                 << " from LONGS to LONGS";
        this->attrs_[name] = std::vector<int64_t>();
        break;
      }
      case proto::AttrType::FLOATS: {
        VLOG(11) << "SetAttr: " << Type() << ", " << name
                 << " from INTS to FLOATS";
        this->attrs_[name] = std::vector<float>();
        break;
      }
      case proto::AttrType::STRINGS: {
        VLOG(11) << "SetAttr: " << Type() << ", " << name
                 << " from INTS to STRINGS";
        this->attrs_[name] = std::vector<std::string>();
        break;
      }
      case proto::AttrType::BLOCKS: {
        VLOG(11) << "SetAttr: " << Type() << ", " << name
                 << " from INTS to BLOCKS";
        this->SetBlocksAttr(name, std::vector<BlockDesc *>());
        return;
      }
      default:
        PADDLE_THROW("Wrong attr type %d", attr.type());
    }
    need_update_ = true;
    return;
  }

  this->attrs_[name] = v;
  need_update_ = true;
}

void OpDesc::SetBlockAttr(const std::string &name, BlockDesc *block) {
  this->attrs_[name] = block;
  need_update_ = true;
}

void OpDesc::SetBlocksAttr(const std::string &name,
                           std::vector<BlockDesc *> blocks) {
  this->attrs_[name] = blocks;
  need_update_ = true;
}

void OpDesc::SetAttrMap(
    const std::unordered_map<std::string, Attribute> &attr_map) {
  attrs_ = attr_map;
  need_update_ = true;
}

Attribute OpDesc::GetAttr(const std::string &name) const {
  auto it = attrs_.find(name);
  PADDLE_ENFORCE(it != attrs_.end(), "Attribute %s is not found", name);
  return it->second;
}

const proto::OpProto::Attr &OpDesc::GetProtoAttr(
    const std::string &name) const {
  const proto::OpProto &proto = OpInfoMap::Instance().Get(Type()).Proto();
  for (int i = 0; i != proto.attrs_size(); ++i) {
    const proto::OpProto::Attr &attr = proto.attrs(i);
    if (attr.name() == name) {
      return attr;
    }
  }

  PADDLE_THROW("Attribute %s is not found in proto %s", name, proto.type());
}

Attribute OpDesc::GetNullableAttr(const std::string &name) const {
  auto it = attrs_.find(name);
  if (it != attrs_.end()) {
    return it->second;
  } else {
    return Attribute();
  }
}

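// Block-typed attributes are held as BlockDesc pointers in memory; the two
// helpers below resolve them to block indices, which is the form used when
// the attribute is serialized.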
std::vector<int> OpDesc::GetBlocksAttrIds(const std::string &name) const {
  auto it = attrs_.find(name);
  PADDLE_ENFORCE(it != attrs_.end(), "Attribute %s is not found", name);
  auto blocks = boost::get<std::vector<BlockDesc *>>(it->second);

  std::vector<int> ids;
  for (auto n : blocks) {
    ids.push_back(n->ID());
  }

  return ids;
}

int OpDesc::GetBlockAttrId(const std::string &name) const {
  auto it = attrs_.find(name);
  PADDLE_ENFORCE(it != attrs_.end(), "Attribute %s is not found", name);
  return boost::get<BlockDesc *>(it->second)->ID();
}

const std::unordered_map<std::string, Attribute> &OpDesc::GetAttrMap() const {
  return attrs_;
}

void OpDesc::Rename(const std::string &old_name, const std::string &new_name) {
  RenameInput(old_name, new_name);
  RenameOutput(old_name, new_name);
  need_update_ = true;
}

void OpDesc::RenameOutput(const std::string &old_name,
                          const std::string &new_name) {
  for (auto &output : outputs_) {
    std::replace(output.second.begin(), output.second.end(), old_name,
                 new_name);
  }

  auto it = attrs_.find(framework::OpProtoAndCheckerMaker::OpRoleVarAttrName());
  if (it != attrs_.end()) {
    auto &op_vars = boost::get<std::vector<std::string>>(it->second);
    std::replace(op_vars.begin(), op_vars.end(), old_name, new_name);
  }

  need_update_ = true;
}

void OpDesc::RenameInput(const std::string &old_name,
                         const std::string &new_name) {
  for (auto &input : inputs_) {
    std::replace(input.second.begin(), input.second.end(), old_name, new_name);
  }

  auto it = attrs_.find(framework::OpProtoAndCheckerMaker::OpRoleVarAttrName());
  if (it != attrs_.end()) {
    auto &op_vars = boost::get<std::vector<std::string>>(it->second);
    std::replace(op_vars.begin(), op_vars.end(), old_name, new_name);
  }

  need_update_ = true;
}

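// Visitor that writes each alternative of the Attribute variant into the
// corresponding field of a proto::OpDesc::Attr message; used by
// OpDesc::Flush() below.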
struct SetAttrDescVisitor : public boost::static_visitor<void> {
  explicit SetAttrDescVisitor(proto::OpDesc::Attr *attr) : attr_(attr) {}
  mutable proto::OpDesc::Attr *attr_;
  void operator()(int v) const { attr_->set_i(v); }
  void operator()(float v) const { attr_->set_f(v); }
  void operator()(const std::string &v) const { attr_->set_s(v); }

  // Please refer to https://github.com/PaddlePaddle/Paddle/issues/7162
  template <class T,
            class = typename std::enable_if<std::is_same<bool, T>::value>::type>
  void operator()(T b) const {
    attr_->set_b(b);
  }

  void operator()(const std::vector<int> &v) const {
    VectorToRepeated(v, attr_->mutable_ints());
  }
  void operator()(const std::vector<float> &v) const {
    VectorToRepeated(v, attr_->mutable_floats());
  }
  void operator()(const std::vector<std::string> &v) const {
    VectorToRepeated(v, attr_->mutable_strings());
  }
  void operator()(const std::vector<bool> &v) const {
    VectorToRepeated(v, attr_->mutable_bools());
  }
  void operator()(const std::vector<BlockDesc *> &v) const {
    std::vector<int> blocks_idx;
    for (auto blk : v) {
      blocks_idx.push_back(blk->ID());
    }
    VectorToRepeated(blocks_idx, attr_->mutable_blocks_idx());
  }

  void operator()(BlockDesc *desc) const { attr_->set_block_idx(desc->ID()); }

  void operator()(int64_t v) const { attr_->set_l(v); }

  void operator()(const std::vector<int64_t> &v) const {
    VectorToRepeated(v, attr_->mutable_longs());
  }

  void operator()(boost::blank) const { PADDLE_THROW("Unexpected branch"); }
};

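// Flush() synchronizes the cached inputs_/outputs_/attrs_ maps back into the
// underlying proto::OpDesc when need_update_ is set.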
void OpDesc::Flush() {
  if (need_update_) {
    this->desc_.mutable_inputs()->Clear();
    for (auto &ipt : inputs_) {
      auto *input = desc_.add_inputs();
      input->set_parameter(ipt.first);
      VectorToRepeated(ipt.second, input->mutable_arguments());
    }

    this->desc_.mutable_outputs()->Clear();
    for (auto &opt : outputs_) {
      auto *output = desc_.add_outputs();
      output->set_parameter(opt.first);
      VectorToRepeated(opt.second, output->mutable_arguments());
    }

    this->desc_.mutable_attrs()->Clear();
    for (auto &attr : attrs_) {
      auto *attr_desc = desc_.add_attrs();
      attr_desc->set_name(attr.first);
      attr_desc->set_type(
          static_cast<proto::AttrType>(attr.second.which() - 1));
      SetAttrDescVisitor visitor(attr_desc);
      boost::apply_visitor(visitor, attr.second);
    }

    need_update_ = false;
  }
}

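// Lazily installs a default infer_shape functor (backed by
// OperatorWithKernel::InferShape) for every operator that has kernels but did
// not register its own infer_shape; runs at most once per process.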
static std::once_flag init_infer_shape_funcs;

static void InitInferShapeFuncs() {
  std::call_once(init_infer_shape_funcs, [] {
    auto &map = OpInfoMap::Instance();
    auto &info_map = *map.mutable_map();

    for (auto &kern_pair : OperatorWithKernel::AllOpKernels()) {
      auto op_type = kern_pair.first;
      auto it = info_map.find(op_type);
      PADDLE_ENFORCE(it != info_map.end(), "%s has not been registered",
                     op_type);
      auto &op_info = it->second;
      auto op = static_cast<OperatorWithKernel *>(op_info.Creator()(
          "", VariableNameMap{}, VariableNameMap{}, AttributeMap{}));
      if (op_info.infer_shape_) {  // infer_shape has been registered.
        continue;
      }
      op_info.infer_shape_ = [op](InferShapeContext *ctx) {
        op->InferShape(ctx);
      };
    }
  });
}

void OpDesc::CheckAttrs() {
  PADDLE_ENFORCE(!Type().empty(),
                 "CheckAttrs() cannot be called before the type is set.");
  auto *checker = OpInfoMap::Instance().Get(Type()).Checker();
  if (checker == nullptr) {
    // checker is not configured. That operator could be generated by Paddle,
    // not by users.
    return;
  }
  checker->Check(attrs_);
}

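// Runs compile-time shape inference for this op against the VarDescs in
// `block`, using the infer_shape functor registered in OpInfoMap.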
void OpDesc::InferShape(const BlockDesc &block) const {
  VLOG(3) << "CompileTime infer shape on " << Type();
  InitInferShapeFuncs();
  auto &infer_shape = OpInfoMap::Instance().Get(this->Type()).infer_shape_;
  PADDLE_ENFORCE(static_cast<bool>(infer_shape),
                 "%s's infer_shape has not been registered", this->Type());
  CompileTimeInferShapeContext ctx(*this, block);
  if (VLOG_IS_ON(10)) {
    std::ostringstream sout;
    auto inames = this->InputArgumentNames();
    sout << " From [";
    std::copy(inames.begin(), inames.end(),
              std::ostream_iterator<std::string>(sout, ", "));
    sout << "] to [";
    auto onames = this->OutputArgumentNames();
    std::copy(onames.begin(), onames.end(),
              std::ostream_iterator<std::string>(sout, ", "));
    sout << "]";
    VLOG(10) << sout.str();
  }
  infer_shape(&ctx);
}

void OpDesc::InferVarType(BlockDesc *block) const {
  // There are a few places where a variable's type can be set.
  // When a VarDesc is created, it is set to LOD_TENSOR by default.
  // When an output variable is created, it is also set to LOD_TENSOR by
  // default. We limit this to be the only place where an operator defines its
  // customized var type inference. Hence, we don't do any "default" setting
  // here.
  auto &info = OpInfoMap::Instance().Get(this->Type());
  if (info.infer_var_type_) {
    info.infer_var_type_(*this, block);
  }
}

CompileTimeInferShapeContext::CompileTimeInferShapeContext(
    const OpDesc &op, const BlockDesc &block)
    : op_(op), block_(block) {}

bool CompileTimeInferShapeContext::HasInput(const std::string &name) const {
  const std::vector<std::string> &input_names = op_.Input(name);
  auto length = input_names.size();
  if (length == 0) {
    return false;
  }
  PADDLE_ENFORCE_EQ(length, 1UL,
                    "Input(%s) should have only one value, "
                    "but it has %d now",
                    name, length);
  return block_.HasVarRecursive(input_names[0]);
}

bool CompileTimeInferShapeContext::HasOutput(const std::string &name) const {
  const std::vector<std::string> &output_names = op_.Output(name);
  auto length = output_names.size();
  if (length == 0) {
    return false;
  }
  PADDLE_ENFORCE_EQ(length, 1UL,
                    "Output(%s) should have only one value, "
                    "but it has %d now",
                    name, length);
  return block_.HasVarRecursive(output_names[0]);
}

bool CompileTimeInferShapeContext::HasInputs(const std::string &name) const {
  const std::vector<std::string> &input_names = op_.Input(name);
  if (input_names.empty()) {
    return false;
  }
  for (auto &input : input_names) {
    if (!block_.HasVarRecursive(input)) return false;
  }
  return true;
}

bool CompileTimeInferShapeContext::HasOutputs(const std::string &name) const {
  const std::vector<std::string> &output_names = op_.Output(name);
  if (output_names.empty()) {
    return false;
  }
  for (auto &output : output_names) {
    if (!block_.HasVarRecursive(output)) return false;
  }
  return true;
}

AttrReader CompileTimeInferShapeContext::Attrs() const {
  return AttrReader(op_.GetAttrMap());
}

const std::vector<std::string> &CompileTimeInferShapeContext::Inputs(
    const std::string &name) const {
  return op_.Input(name);
}

const std::vector<std::string> &CompileTimeInferShapeContext::Outputs(
    const std::string &name) const {
  return op_.Output(name);
}

std::vector<DDim> CompileTimeInferShapeContext::GetRepeatedDims(
    const std::string &name) const {
  auto var = block_.FindVarRecursive(name);
  PADDLE_ENFORCE(var != nullptr, "Cannot find variable %s", name);
  std::vector<DDim> res;
  try {
    auto shapes = var->GetShapes();
    for (const auto &s : shapes) {
      res.push_back(s.empty() ? make_ddim({0UL}) : make_ddim(s));
    }
  } catch (...) {
    VLOG(5) << "GetRepeatedDim of variable " << name << " error.";
    std::rethrow_exception(std::current_exception());
  }
  return res;
}

void CompileTimeInferShapeContext::SetDim(const std::string &name,
                                          const DDim &dim) {
  block_.FindVarRecursive(name)->SetShape(vectorize(dim));
}

void CompileTimeInferShapeContext::SetRepeatedDims(
    const std::string &name, const std::vector<DDim> &dims) {
  auto var = block_.FindVarRecursive(name);
  PADDLE_ENFORCE(var != nullptr, "Cannot find variable %s", name);
  std::vector<std::vector<int64_t>> dim_vec(dims.size());
  std::transform(dims.begin(), dims.end(), dim_vec.begin(), vectorize);
  var->SetShapes(dim_vec);
753
}
F
fengjiayi 已提交
754

755 756
bool CompileTimeInferShapeContext::IsRuntime() const { return false; }

757
proto::VarType::Type CompileTimeInferShapeContext::GetVarType(
758 759 760
    const std::string &name) const {
  return block_.FindVarRecursive(name)->GetType();
}
761

F
fengjiayi 已提交
762 763
}  // namespace framework
}  // namespace paddle