/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/framework/op_desc.h"

#include <algorithm>
#include <functional>
#include <mutex>  // NOLINT
#include <string>
#include <unordered_map>
#include <utility>

#include "glog/logging.h"
#include "paddle/fluid/framework/block_desc.h"
#include "paddle/fluid/framework/op_call_stack.h"
#include "paddle/fluid/framework/op_proto_maker.h"
#include "paddle/fluid/framework/operator.h"
#include "paddle/fluid/framework/program_desc.h"
#include "paddle/fluid/framework/shape_inference.h"
#include "paddle/fluid/framework/var_type_inference.h"

namespace paddle {
namespace framework {

class OpDesc;
class BlockDesc;
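
// Compile-time implementation of InferShapeContext. Instead of inspecting
// runtime tensors, it reads and updates the shapes, LoD levels and types
// recorded in the VarDesc entries of the enclosing BlockDesc.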
class CompileTimeInferShapeContext : public InferShapeContext {
 public:
  CompileTimeInferShapeContext(const OpDesc &op, const BlockDesc &block);

  bool HasInput(const std::string &name) const override;

  bool HasOutput(const std::string &name) const override;

  bool HasInputs(const std::string &name) const override;

  bool HasOutputs(const std::string &name) const override;

  AttrReader Attrs() const override;

  std::vector<std::string> Inputs(const std::string &name) const override;

  std::vector<std::string> Outputs(const std::string &name) const override;

  std::string GetInputNameByIdx(size_t idx) const override {
    auto &op_proto =
        paddle::framework::OpInfoMap::Instance().Get(op_.Type()).proto_;
    PADDLE_ENFORCE_LT(idx, op_proto->inputs().size(),
                      platform::errors::OutOfRange(
                          "The index should be less than the size of inputs of "
                          "operator %s, but got index is %d and size is %d",
                          op_.Type(), idx, op_proto->inputs().size()));
    return op_proto->inputs()[idx].name();
  }

  std::string GetOutputNameByIdx(size_t idx) const override {
    auto &op_proto =
        paddle::framework::OpInfoMap::Instance().Get(op_.Type()).proto_;
    PADDLE_ENFORCE_LT(
        idx, op_proto->outputs().size(),
        platform::errors::OutOfRange(
            "The index should be less than the size of outputs of "
            "operator %s, but got index is %d and size is %d",
            op_.Type(), idx, op_proto->outputs().size()));
    return op_proto->outputs()[idx].name();
  }

  void ShareDim(const std::string &in, const std::string &out, size_t i = 0,
                size_t j = 0) override {
    PADDLE_ENFORCE_LT(i, Inputs(in).size());
    PADDLE_ENFORCE_LT(j, Outputs(out).size());
    std::string input_n = Inputs(in)[i];
    std::string output_n = Outputs(out)[j];

    PADDLE_ENFORCE(input_n != framework::kEmptyVarName, "The %s[%d] is @EMPTY@",
                   in, i);
    PADDLE_ENFORCE(output_n != framework::kEmptyVarName,
                   "The %s[%d] is @EMPTY@", out, j);

    auto *in_var = block_.FindVarRecursive(input_n);
    auto *out_var = block_.FindVarRecursive(output_n);

    PADDLE_ENFORCE(in_var->GetType() == out_var->GetType(),
                   "The type of %s and %s is not the same.", input_n, output_n);

    SetDim(output_n, GetDim(input_n));
  }

  void ShareAllLoD(const std::string &in,
                   const std::string &out) const override {
    auto &in_var_names = op_.Input(in);
    auto &out_var_names = op_.Output(out);

    PADDLE_ENFORCE_EQ(
        in_var_names.size(), out_var_names.size(),
        platform::errors::PreconditionNotMet(
            "Op [%s]:  Input var number should be equal with output var number",
            op_.Type()));

    for (size_t i = 0; i < in_var_names.size(); ++i) {
      if (out_var_names[i] == framework::kEmptyVarName) {
        continue;
      }

      auto *in_var = block_.FindVarRecursive(in_var_names[i]);
      auto *out_var = block_.FindVarRecursive(out_var_names[i]);
      if (in_var->GetType() != proto::VarType::LOD_TENSOR &&
          in_var->GetType() != proto::VarType::LOD_TENSOR_ARRAY) {
        VLOG(3) << "input " << in << " is not LoDTensor or LoDTensorArray.";
        return;
      }
      out_var->SetLoDLevel(in_var->GetLoDLevel());
    }
  }

  void ShareLoD(const std::string &in, const std::string &out, size_t i = 0,
                size_t j = 0) const override {
    PADDLE_ENFORCE_LT(i, Inputs(in).size());
    PADDLE_ENFORCE_LT(j, Outputs(out).size());
    PADDLE_ENFORCE(Inputs(in)[i] != framework::kEmptyVarName,
                   "The %s[%d] is @EMPTY@", in, i);
    PADDLE_ENFORCE(Outputs(out)[j] != framework::kEmptyVarName,
                   "The %s[%d] is @EMPTY@", out, j);
    auto *in_var = block_.FindVarRecursive(Inputs(in)[i]);
    auto *out_var = block_.FindVarRecursive(Outputs(out)[j]);
    if (in_var->GetType() != proto::VarType::LOD_TENSOR &&
        in_var->GetType() != proto::VarType::LOD_TENSOR_ARRAY) {
      VLOG(3) << "input " << in << " is not LoDTensor or LoDTensorArray.";
      return;
    }
    out_var->SetLoDLevel(in_var->GetLoDLevel());
  }

  int32_t GetLoDLevel(const std::string &in, size_t i = 0) const override {
    PADDLE_ENFORCE_LT(i, Inputs(in).size(),
                      "Input %s of operator %s only has %d elements.", in,
                      op_.Type(), Inputs(in).size());
    PADDLE_ENFORCE_NE(Inputs(in)[i], framework::kEmptyVarName,
                      "Input %s[%d] of operator %s is @EMPTY@", in, op_.Type(),
                      i);
    auto *in_var = block_.FindVarRecursive(Inputs(in)[i]);
    PADDLE_ENFORCE_NOT_NULL(
        in_var, "Input %s[%d] of operator %s should not be nullptr.", in,
        op_.Type(), i);
    return in_var->GetLoDLevel();
  }

  void SetLoDLevel(const std::string &out, int32_t lod_level,
                   size_t j = 0) const override {
    PADDLE_ENFORCE_LT(j, Outputs(out).size(),
                      "Output %s of operator %s only has %d elements.", out,
                      op_.Type(), Outputs(out).size());
    PADDLE_ENFORCE_NE(Outputs(out)[j], framework::kEmptyVarName,
                      "Output %s[%d] of operator %s is @EMPTY@", out,
                      op_.Type(), j);
    auto *out_var = block_.FindVarRecursive(Outputs(out)[j]);
    PADDLE_ENFORCE_NOT_NULL(
        out_var, "Output %s[%d] of operator %s should not be nullptr.", out,
        op_.Type(), j);
    if (lod_level >= 0) {
      out_var->SetLoDLevel(lod_level);
    }
  }

  std::vector<InferShapeVarPtr> GetInputVarPtrs(
      const std::string &name) override {
    const std::vector<std::string> arg_names = Inputs(name);
    std::vector<InferShapeVarPtr> res;
    res.reserve(arg_names.size());
    std::transform(arg_names.begin(), arg_names.end(), std::back_inserter(res),
                   [this](const std::string &name) {
                     return block_.FindVarRecursive(name);
                   });
    return res;
  }

  std::vector<InferShapeVarPtr> GetOutputVarPtrs(
      const std::string &name) override {
    const std::vector<std::string> arg_names = Outputs(name);
    std::vector<InferShapeVarPtr> res;
    res.reserve(arg_names.size());
    std::transform(arg_names.begin(), arg_names.end(), std::back_inserter(res),
                   [this](const std::string &name) {
                     return block_.FindVarRecursive(name);
                   });
    return res;
  }

  DDim GetInputDim(const std::string &name) const override {
    const std::vector<std::string> &arg_names = Inputs(name);
    PADDLE_ENFORCE_EQ(arg_names.size(), 1UL,
                      "Input(%s) should hold one element, but now it holds %d",
                      name, arg_names.size());
    return this->GetDim(arg_names[0]);
  }

  std::vector<DDim> GetInputsDim(const std::string &name) const override {
    const std::vector<std::string> &arg_names = Inputs(name);
    return GetDims(arg_names);
  }

  bool IsRuntime() const override;

  std::vector<proto::VarType::Type> GetInputsVarType(
      const std::string &name) const override {
    return GetVarTypes(Inputs(name));
  }

  std::vector<proto::VarType::Type> GetOutputsVarType(
      const std::string &name) const override {
    return GetVarTypes(Outputs(name));
  }

  void SetOutputDim(const std::string &name, const DDim &dim) override {
    auto arg_names = Outputs(name);
    PADDLE_ENFORCE_EQ(arg_names.size(), 1UL,
                      "Output(%s) should hold one element, but now it holds %d",
                      name, arg_names.size());
    SetDim(arg_names[0], dim);
  }

  void SetOutputsDim(const std::string &name,
                     const std::vector<DDim> &dims) override {
    auto names = Outputs(name);
    SetDims(names, dims);
  }

 protected:
  std::vector<proto::VarType::Type> GetVarTypes(
      const std::vector<std::string> &names) const {
    std::vector<proto::VarType::Type> retv;
    retv.resize(names.size());
    std::transform(
        names.begin(), names.end(), retv.begin(),
        std::bind(std::mem_fn(&CompileTimeInferShapeContext::GetVarType), this,
                  std::placeholders::_1));
    return retv;
  }

  proto::VarType::Type GetVarType(const std::string &name) const;

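  // Look up `name` recursively in block_ and convert the recorded shape into
  // a DDim; an empty shape is mapped to make_ddim({0UL}).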
  DDim GetDim(const std::string &name) const {
    auto var = block_.FindVarRecursive(name);
    PADDLE_ENFORCE(var != nullptr, "Cannot find variable %s", name);
    DDim res;
    try {
      auto shape = var->GetShape();
      res = shape.empty() ? make_ddim({0UL}) : make_ddim(shape);
    } catch (...) {
      VLOG(5) << "GetDim of variable " << name << " error";
      std::rethrow_exception(std::current_exception());
    }
    return res;
  }

  std::vector<DDim> GetDims(const std::vector<std::string> &names) const {
    std::vector<DDim> ret;
    ret.reserve(names.size());
    std::transform(
        names.begin(), names.end(), std::back_inserter(ret),
        [this](const std::string &name) { return this->GetDim(name); });
    return ret;
  }

  void SetDim(const std::string &name, const DDim &dim);

  void SetDims(const std::vector<std::string> &names,
               const std::vector<DDim> &dims) {
    size_t length = names.size();
    PADDLE_ENFORCE_EQ(length, dims.size());
    for (size_t i = 0; i < length; ++i) {
      if (names[i] == framework::kEmptyVarName) {
        continue;
      }
      SetDim(names[i], dims[i]);
    }
  }

  std::vector<DDim> GetRepeatedDims(const std::string &name) const override;

  void SetRepeatedDims(const std::string &name,
                       const std::vector<DDim> &dims) override;

  const OpDesc &op_;
  const BlockDesc &block_;
};

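// Illustrative usage only (operator and argument names below are hypothetical,
// not taken from this file):
//   OpDesc op("sum", /*inputs=*/{{"X", {"x0", "x1"}}},
//             /*outputs=*/{{"Out", {"out"}}}, /*attrs=*/{});
// The maps are kept in the in-memory representation; need_update_ marks the
// protobuf form as stale until Flush() rebuilds it.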
OpDesc::OpDesc(const std::string &type, const VariableNameMap &inputs,
               const VariableNameMap &outputs, const AttributeMap &attrs) {
  desc_.set_type(type);
  inputs_ = inputs;
  outputs_ = outputs;
  attrs_ = attrs;
  need_update_ = true;
  block_ = nullptr;
}

OpDesc::OpDesc(const OpDesc &other, BlockDesc *block) {
  CopyFrom(other);
  block_ = block;
  need_update_ = true;
}

void OpDesc::CopyFrom(const OpDesc &op_desc) {
  desc_.set_type(op_desc.Type());
  inputs_ = op_desc.inputs_;
  outputs_ = op_desc.outputs_;
  attrs_ = op_desc.attrs_;
  need_update_ = true;
}

OpDesc::OpDesc(const proto::OpDesc &desc, BlockDesc *block)
    : desc_(desc), need_update_(false) {
  // restore inputs_
  int input_size = desc_.inputs_size();
  for (int i = 0; i < input_size; ++i) {
    const proto::OpDesc::Var &var = desc_.inputs(i);
    std::vector<std::string> &args = inputs_[var.parameter()];
    int argu_size = var.arguments_size();
    args.reserve(argu_size);
    for (int j = 0; j < argu_size; ++j) {
      args.push_back(var.arguments(j));
    }
  }
  // restore outputs_
  int output_size = desc_.outputs_size();
  for (int i = 0; i < output_size; ++i) {
    const proto::OpDesc::Var &var = desc_.outputs(i);
    std::vector<std::string> &args = outputs_[var.parameter()];
    int argu_size = var.arguments_size();
    args.reserve(argu_size);
    for (int j = 0; j < argu_size; ++j) {
      args.push_back(var.arguments(j));
    }
  }
  // restore attrs_
  for (const proto::OpDesc::Attr &attr : desc_.attrs()) {
    std::string attr_name = attr.name();
    // The sub_block referred to by the BLOCK attr hasn't been added
    // to ProgramDesc class yet, we skip setting BLOCK/BLOCKS attr here.
    if (attr.type() != proto::AttrType::BLOCK &&
        attr.type() != proto::AttrType::BLOCKS) {
      attrs_[attr_name] = GetAttrValue(attr);
    }
  }
  this->block_ = block;
}

proto::OpDesc *OpDesc::Proto() {
  Flush();
  return &desc_;
}

const std::vector<std::string> &OpDesc::Input(const std::string &name) const {
  auto it = inputs_.find(name);
  PADDLE_ENFORCE(it != inputs_.end(), "Input %s cannot be found in Op %s", name,
                 Type());
  return it->second;
}

std::vector<std::string> OpDesc::InputArgumentNames() const {
  std::vector<std::string> retv;
  for (auto &ipt : this->inputs_) {
    retv.insert(retv.end(), ipt.second.begin(), ipt.second.end());
  }
  return retv;
}

void OpDesc::SetInput(const std::string &param_name,
                      const std::vector<std::string> &args) {
  need_update_ = true;
  inputs_[param_name] = args;
}

const std::vector<std::string> &OpDesc::Output(const std::string &name) const {
  auto it = outputs_.find(name);
  PADDLE_ENFORCE(it != outputs_.end(), "Output %s cannot be found in Op %s",
                 name, Type());
  return it->second;
}

bool OpDesc::HasOutput(const std::string &name) const {
  return outputs_.find(name) != outputs_.end();
}

std::vector<std::string> OpDesc::OutputArgumentNames() const {
  std::vector<std::string> retv;
  for (auto &ipt : this->outputs_) {
    retv.insert(retv.end(), ipt.second.begin(), ipt.second.end());
  }
  return retv;
}

void OpDesc::SetOutput(const std::string &param_name,
                       const std::vector<std::string> &args) {
  need_update_ = true;
  this->outputs_[param_name] = args;
}

bool OpDesc::HasProtoAttr(const std::string &name) const {
  auto &op_info = OpInfoMap::Instance();
  if (op_info.Has(desc_.type())) {
    auto op_info_ptr = op_info.Get(desc_.type());
    if (op_info_ptr.HasOpProtoAndChecker()) {
      const proto::OpProto &proto = op_info_ptr.Proto();
      for (int i = 0; i != proto.attrs_size(); ++i) {
        const proto::OpProto::Attr &attr = proto.attrs(i);
        if (attr.name() == name) {
          return true;
        }
      }
    }
  }
  return false;
}

proto::AttrType OpDesc::GetAttrType(const std::string &name) const {
  auto it = attrs_.find(name);
  PADDLE_ENFORCE(it != attrs_.end(), "Attribute %s is not found", name);
  return static_cast<proto::AttrType>(it->second.which() - 1);
}

std::vector<std::string> OpDesc::AttrNames() const {
  std::vector<std::string> retv;
  retv.reserve(attrs_.size());
  for (auto &attr : attrs_) {
    retv.push_back(attr.first);
  }
  return retv;
}

void OpDesc::RemoveAttr(const std::string &name) {
  attrs_.erase(name);
  need_update_ = true;
}

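// Stores attribute `name` = `v`, handling two special cases first: an empty
// INTS value is retyped to the attribute's declared element type (see the
// NOTICE below), and an INT value is converted to bool for BOOLEAN attributes.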
void OpDesc::SetAttr(const std::string &name, const Attribute &v) {
  // NOTICE(minqiyang): pybind11 will take the empty list in python as
  // the std::vector<int> type in C++; so we have to change the attr's type
  // here if we meet this issue
  proto::AttrType attr_type = static_cast<proto::AttrType>(v.which() - 1);
  if (attr_type == proto::AttrType::INTS &&
      BOOST_GET_CONST(std::vector<int>, v).size() == 0u) {
    // Find current attr via attr name and set the correct attribute value
    const proto::OpProto::Attr &attr = GetProtoAttr(name);
    switch (attr.type()) {
      case proto::AttrType::BOOLEANS: {
        VLOG(11) << "SetAttr: " << Type() << ", " << name
                 << " from INTS to BOOLEANS";
        this->attrs_[name] = std::vector<bool>();
        break;
      }
      case proto::AttrType::INTS: {
        VLOG(11) << "SetAttr: " << Type() << ", " << name
                 << " from INTS to INTS";
        this->attrs_[name] = std::vector<int>();
        break;
      }
      case proto::AttrType::LONGS: {
        VLOG(11) << "SetAttr: " << Type() << ", " << name
                 << " from LONGS to LONGS";
        this->attrs_[name] = std::vector<int64_t>();
        break;
      }
      case proto::AttrType::FLOATS: {
        VLOG(11) << "SetAttr: " << Type() << ", " << name
                 << " from INTS to FLOATS";
        this->attrs_[name] = std::vector<float>();
        break;
      }
      case proto::AttrType::STRINGS: {
        VLOG(11) << "SetAttr: " << Type() << ", " << name
                 << " from INTS to STRINGS";
        this->attrs_[name] = std::vector<std::string>();
        break;
      }
      case proto::AttrType::BLOCKS: {
        VLOG(11) << "SetAttr: " << Type() << ", " << name
                 << " from INTS to BLOCKS";
        this->SetBlocksAttr(name, std::vector<BlockDesc *>());
        return;
      }
      default:
        PADDLE_THROW("Wrong attr type %d", attr.type());
    }
    need_update_ = true;
    return;
  }

  // In order to set bool attr properly
  if (attr_type == proto::AttrType::INT && HasProtoAttr(name) &&
      GetProtoAttr(name).type() == proto::AttrType::BOOLEAN) {
    this->attrs_[name] = static_cast<bool>(BOOST_GET_CONST(int, v));
    need_update_ = true;
    return;
  }

  this->attrs_[name] = v;
  need_update_ = true;
}

void OpDesc::SetBlockAttr(const std::string &name, BlockDesc *block) {
  this->attrs_[name] = block;
  need_update_ = true;
}

void OpDesc::SetBlocksAttr(const std::string &name,
                           std::vector<BlockDesc *> blocks) {
  this->attrs_[name] = blocks;
  need_update_ = true;
}

void OpDesc::SetAttrMap(
    const std::unordered_map<std::string, Attribute> &attr_map) {
  attrs_ = attr_map;
  need_update_ = true;
}

Attribute OpDesc::GetAttr(const std::string &name) const {
  auto it = attrs_.find(name);
  PADDLE_ENFORCE(it != attrs_.end(), "Attribute %s is not found", name);
  return it->second;
}

const proto::OpProto::Attr &OpDesc::GetProtoAttr(
    const std::string &name) const {
  const proto::OpProto &proto = OpInfoMap::Instance().Get(Type()).Proto();
  for (int i = 0; i != proto.attrs_size(); ++i) {
    const proto::OpProto::Attr &attr = proto.attrs(i);
    if (attr.name() == name) {
      return attr;
    }
  }

  PADDLE_THROW("Attribute %s is not found in proto %s", name, proto.type());
}

Attribute OpDesc::GetNullableAttr(const std::string &name) const {
  auto it = attrs_.find(name);
  if (it != attrs_.end()) {
    return it->second;
  } else {
    return Attribute();
  }
}

std::vector<int> OpDesc::GetBlocksAttrIds(const std::string &name) const {
  auto it = attrs_.find(name);
  PADDLE_ENFORCE(it != attrs_.end(), "Attribute %s is not found", name);
  auto blocks = BOOST_GET_CONST(std::vector<BlockDesc *>, it->second);

  std::vector<int> ids;
  for (auto n : blocks) {
    ids.push_back(n->ID());
  }

  return ids;
}

int OpDesc::GetBlockAttrId(const std::string &name) const {
  auto it = attrs_.find(name);
  PADDLE_ENFORCE(it != attrs_.end(), "Attribute %s is not found", name);
  return BOOST_GET_CONST(BlockDesc *, it->second)->ID();
}

const std::unordered_map<std::string, Attribute> &OpDesc::GetAttrMap() const {
  return attrs_;
}

void OpDesc::Rename(const std::string &old_name, const std::string &new_name) {
  RenameInput(old_name, new_name);
  RenameOutput(old_name, new_name);
  need_update_ = true;
}

void OpDesc::RenameOutput(const std::string &old_name,
                          const std::string &new_name) {
  for (auto &output : outputs_) {
    std::replace(output.second.begin(), output.second.end(), old_name,
                 new_name);
  }

  auto it = attrs_.find(framework::OpProtoAndCheckerMaker::OpRoleVarAttrName());
  if (it != attrs_.end()) {
    auto &op_vars = BOOST_GET(std::vector<std::string>, it->second);
    std::replace(op_vars.begin(), op_vars.end(), old_name, new_name);
  }

  need_update_ = true;
}

void OpDesc::RenameInput(const std::string &old_name,
                         const std::string &new_name) {
  for (auto &input : inputs_) {
    std::replace(input.second.begin(), input.second.end(), old_name, new_name);
  }

  auto it = attrs_.find(framework::OpProtoAndCheckerMaker::OpRoleVarAttrName());
  if (it != attrs_.end()) {
    auto &op_vars = BOOST_GET(std::vector<std::string>, it->second);
    std::replace(op_vars.begin(), op_vars.end(), old_name, new_name);
  }

  need_update_ = true;
}

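// Visitor that serializes a single in-memory Attribute value into the
// matching field of a proto::OpDesc::Attr message; applied by OpDesc::Flush().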
struct SetAttrDescVisitor : public boost::static_visitor<void> {
  explicit SetAttrDescVisitor(proto::OpDesc::Attr *attr) : attr_(attr) {}
  mutable proto::OpDesc::Attr *attr_;
  void operator()(int v) const { attr_->set_i(v); }
  void operator()(float v) const { attr_->set_f(v); }
  void operator()(const std::string &v) const { attr_->set_s(v); }

  // Please refer to https://github.com/PaddlePaddle/Paddle/issues/7162
  template <class T,
            class = typename std::enable_if<std::is_same<bool, T>::value>::type>
  void operator()(T b) const {
    attr_->set_b(b);
  }

  void operator()(const std::vector<int> &v) const {
    VectorToRepeated(v, attr_->mutable_ints());
  }
  void operator()(const std::vector<float> &v) const {
    VectorToRepeated(v, attr_->mutable_floats());
  }
  void operator()(const std::vector<std::string> &v) const {
    VectorToRepeated(v, attr_->mutable_strings());
  }
  void operator()(const std::vector<bool> &v) const {
    VectorToRepeated(v, attr_->mutable_bools());
  }
  void operator()(const std::vector<BlockDesc *> &v) const {
    std::vector<int> blocks_idx;
    for (auto blk : v) {
      blocks_idx.push_back(blk->ID());
    }
    VectorToRepeated(blocks_idx, attr_->mutable_blocks_idx());
  }

  void operator()(BlockDesc *desc) const { attr_->set_block_idx(desc->ID()); }

  void operator()(int64_t v) const { attr_->set_l(v); }

  void operator()(const std::vector<int64_t> &v) const {
    VectorToRepeated(v, attr_->mutable_longs());
  }

  void operator()(boost::blank) const { PADDLE_THROW("Unexpected branch"); }
};

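// Sync the in-memory inputs_/outputs_/attrs_ maps back into the underlying
// proto::OpDesc message if they have been modified since the last flush.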
void OpDesc::Flush() {
  if (need_update_) {
    this->desc_.mutable_inputs()->Clear();
    for (auto &ipt : inputs_) {
      auto *input = desc_.add_inputs();
      input->set_parameter(ipt.first);
      VectorToRepeated(ipt.second, input->mutable_arguments());
    }

    this->desc_.mutable_outputs()->Clear();
    for (auto &opt : outputs_) {
      auto *output = desc_.add_outputs();
      output->set_parameter(opt.first);
      VectorToRepeated(opt.second, output->mutable_arguments());
    }

    this->desc_.mutable_attrs()->Clear();
    for (auto &attr : attrs_) {
      auto *attr_desc = desc_.add_attrs();
      attr_desc->set_name(attr.first);
      attr_desc->set_type(
          static_cast<proto::AttrType>(attr.second.which() - 1));
      SetAttrDescVisitor visitor(attr_desc);
      boost::apply_visitor(visitor, attr.second);
    }

    need_update_ = false;
  }
}

void OpDesc::CheckAttrs() {
  PADDLE_ENFORCE(!Type().empty(),
                 "CheckAttr() can not be called before type is set.");
  auto *checker = OpInfoMap::Instance().Get(Type()).Checker();
  if (checker == nullptr) {
    // checker is not configured. That operator could be generated by Paddle,
    // not by users.
    return;
  }
  VLOG(10) << "begin to check attribute of " << Type();
  checker->Check(&attrs_);
}

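// Run the operator's registered compile-time infer-shape function against the
// variable descriptions in `block`; the op type is appended via
// AppendErrorOpHint to any enforce error that escapes.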
void OpDesc::InferShape(const BlockDesc &block) const {
  try {
    VLOG(3) << "CompileTime infer shape on " << Type();
    auto &infer_shape = OpInfoMap::Instance().Get(this->Type()).infer_shape_;
    PADDLE_ENFORCE(static_cast<bool>(infer_shape),
                   "%s's infer_shape has not been registered", this->Type());
    CompileTimeInferShapeContext ctx(*this, block);
    if (VLOG_IS_ON(10)) {
      std::ostringstream sout;
      auto inames = this->InputArgumentNames();
      sout << " From [";
      std::copy(inames.begin(), inames.end(),
                std::ostream_iterator<std::string>(sout, ", "));
      sout << "] to [";
      auto onames = this->OutputArgumentNames();
      std::copy(onames.begin(), onames.end(),
                std::ostream_iterator<std::string>(sout, ", "));
      sout << "]";
      VLOG(10) << sout.str();
    }
    infer_shape(&ctx);
  } catch (platform::EnforceNotMet &exception) {
    framework::AppendErrorOpHint(Type(), &exception);
    throw std::move(exception);
  } catch (...) {
    std::rethrow_exception(std::current_exception());
  }
}

void OpDesc::InferVarType(BlockDesc *block) const {
  // There are a few places where the var type can be set.
  // When a VarDesc is created, it defaults to LOD_TENSOR.
  // When an output variable is created, it also defaults to LOD_TENSOR.
  // We restrict this to be the only place where an operator defines its
  // customized var type inference; hence, we don't do any "default" setting
  // here.
  auto &info = OpInfoMap::Instance().Get(this->Type());
  if (info.infer_var_type_) {
    InferVarTypeContext context(this, block);
    info.infer_var_type_(&context);
  }
}

CompileTimeInferShapeContext::CompileTimeInferShapeContext(
    const OpDesc &op, const BlockDesc &block)
    : op_(op), block_(block) {}

bool CompileTimeInferShapeContext::HasInput(const std::string &name) const {
  if (op_.Inputs().find(name) == op_.Inputs().end()) {
    return false;
  }
  const std::vector<std::string> &input_names = op_.Input(name);
  auto length = input_names.size();
  if (length == 0) {
    return false;
  }
  PADDLE_ENFORCE_EQ(length, 1UL,
                    "Input(%s) should have only one value, "
                    "but it have %d now",
                    name, length);
  return block_.HasVarRecursive(input_names[0]);
}

bool CompileTimeInferShapeContext::HasOutput(const std::string &name) const {
  if (op_.Outputs().find(name) == op_.Outputs().end()) {
    return false;
  }
  const std::vector<std::string> &output_names = op_.Output(name);
  auto length = output_names.size();
  if (length == 0) {
    return false;
  }
  PADDLE_ENFORCE_EQ(length, 1UL,
                    "Output(%s) should have only one value, "
                    "but it have %d now",
                    name, length);
  return block_.HasVarRecursive(output_names[0]);
}

bool CompileTimeInferShapeContext::HasInputs(const std::string &name) const {
  if (op_.Inputs().find(name) == op_.Inputs().end()) {
    return false;
  }
  const std::vector<std::string> &input_names = op_.Input(name);
  if (input_names.empty()) {
    return false;
  }
  for (auto &input : input_names) {
    if (!block_.HasVarRecursive(input)) return false;
  }
  return true;
}

bool CompileTimeInferShapeContext::HasOutputs(const std::string &name) const {
  if (op_.Outputs().find(name) == op_.Outputs().end()) {
    return false;
  }
  const std::vector<std::string> &output_names = op_.Output(name);
  if (output_names.empty()) {
    return false;
  }
  for (auto &output : output_names) {
    if (!block_.HasVarRecursive(output)) return false;
  }
  return true;
}

AttrReader CompileTimeInferShapeContext::Attrs() const {
  return AttrReader(op_.GetAttrMap());
}

std::vector<std::string> CompileTimeInferShapeContext::Inputs(
    const std::string &name) const {
  return op_.Input(name);
}

std::vector<std::string> CompileTimeInferShapeContext::Outputs(
    const std::string &name) const {
  return op_.Output(name);
}

std::vector<DDim> CompileTimeInferShapeContext::GetRepeatedDims(
    const std::string &name) const {
  auto var = block_.FindVarRecursive(name);
  PADDLE_ENFORCE(var != nullptr, "Cannot find variable %s", name);
  std::vector<DDim> res;
  try {
    auto shapes = var->GetShapes();
    for (const auto &s : shapes) {
      res.push_back(s.empty() ? make_ddim({0UL}) : make_ddim(s));
    }
  } catch (...) {
    VLOG(5) << "GetRepeatedDim of variable " << name << " error.";
    std::rethrow_exception(std::current_exception());
  }
  return res;
}

void CompileTimeInferShapeContext::SetDim(const std::string &name,
                                          const DDim &dim) {
  block_.FindVarRecursive(name)->SetShape(vectorize(dim));
}

void CompileTimeInferShapeContext::SetRepeatedDims(
    const std::string &name, const std::vector<DDim> &dims) {
  auto var = block_.FindVarRecursive(name);
  PADDLE_ENFORCE(var != nullptr, "Cannot find variable %s", name);
  std::vector<std::vector<int64_t>> dim_vec(dims.size());
  std::transform(dims.begin(), dims.end(), dim_vec.begin(), vectorize<>);
  var->SetShapes(dim_vec);
}

bool CompileTimeInferShapeContext::IsRuntime() const { return false; }

proto::VarType::Type CompileTimeInferShapeContext::GetVarType(
    const std::string &name) const {
  return block_.FindVarRecursive(name)->GetType();
}

}  // namespace framework
}  // namespace paddle