/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/framework/op_desc.h"

#include <algorithm>
#include <functional>
#include <string>
#include <unordered_map>
#include <vector>

#include "glog/logging.h"
#include "paddle/fluid/framework/block_desc.h"
#include "paddle/fluid/framework/op_call_stack.h"
#include "paddle/fluid/framework/op_proto_maker.h"
#include "paddle/fluid/framework/operator.h"
#include "paddle/fluid/framework/shape_inference.h"
#include "paddle/fluid/framework/var_type_inference.h"

namespace paddle {
namespace framework {

class CompileTimeInferShapeContext : public InferShapeContext {
 public:
  CompileTimeInferShapeContext(const OpDesc &op, const BlockDesc &block);

  bool HasInput(const std::string &name) const override;

  bool HasOutput(const std::string &name) const override;

  bool HasInputs(const std::string &name) const override;

  bool HasOutputs(const std::string &name) const override;

  AttrReader Attrs() const override;

  std::vector<std::string> Inputs(const std::string &name) const override;

  std::vector<std::string> Outputs(const std::string &name) const override;

  std::string GetInputNameByIdx(size_t idx) const override {
    auto &op_proto =
        paddle::framework::OpInfoMap::Instance().Get(op_.Type()).proto_;
    PADDLE_ENFORCE_LT(idx, op_proto->inputs().size(),
                      platform::errors::OutOfRange(
                          "The index should be less than the size of inputs of "
                          "operator %s, but got index is %d and size is %d",
                          op_.Type(), idx, op_proto->inputs().size()));
    return op_proto->inputs()[idx].name();
  }

  std::string GetOutputNameByIdx(size_t idx) const override {
    auto &op_proto =
        paddle::framework::OpInfoMap::Instance().Get(op_.Type()).proto_;
    PADDLE_ENFORCE_LT(
        idx, op_proto->outputs().size(),
        platform::errors::OutOfRange(
            "The index should be less than the size of outputs of "
            "operator %s, but got index is %d and size is %d",
            op_.Type(), idx, op_proto->outputs().size()));
    return op_proto->outputs()[idx].name();
  }

  void ShareDim(const std::string &in, const std::string &out, size_t i = 0,
                size_t j = 0) override {
    PADDLE_ENFORCE_LT(i, Inputs(in).size(),
                      platform::errors::InvalidArgument(
                          "The input variable index is out of range, expected "
                          "index less than %d, but received index is %d.",
                          Inputs(in).size(), i));
    PADDLE_ENFORCE_LT(j, Outputs(out).size(),
                      platform::errors::InvalidArgument(
                          "The output variable index is out of range, expected "
                          "index less than %d, but received index is %d.",
                          Outputs(out).size(), j));

    std::string input_n = Inputs(in)[i];
    std::string output_n = Outputs(out)[j];

    PADDLE_ENFORCE_NE(input_n, framework::kEmptyVarName,
                      platform::errors::InvalidArgument(
                          "The input variable %s[%d] is empty.", in, i));
    PADDLE_ENFORCE_NE(output_n, framework::kEmptyVarName,
                      platform::errors::InvalidArgument(
                          "The output variable %s[%d] is empty.", out, j));

    auto *in_var = block_.FindVarRecursive(input_n);
    auto *out_var = block_.FindVarRecursive(output_n);

    PADDLE_ENFORCE_EQ(
        in_var->GetType(), out_var->GetType(),
        platform::errors::InvalidArgument(
            "The type of input %s and output %s do not match. The input type "
            "is %s, output type is %s.",
            input_n, output_n, DataTypeToString(in_var->GetType()),
            DataTypeToString(out_var->GetType())));

    SetDim(output_n, GetDim(input_n));
  }

  void ShareAllLoD(const std::string &in,
                   const std::string &out) const override {
    auto &in_var_names = op_.Input(in);
    auto &out_var_names = op_.Output(out);

    PADDLE_ENFORCE_EQ(
        in_var_names.size(), out_var_names.size(),
        platform::errors::PreconditionNotMet(
            "Op [%s]: Input var number should be equal to output var number.",
            op_.Type()));

    for (size_t i = 0; i < in_var_names.size(); ++i) {
      if (out_var_names[i] == framework::kEmptyVarName) {
        continue;
      }

      auto *in_var = block_.FindVarRecursive(in_var_names[i]);
      auto *out_var = block_.FindVarRecursive(out_var_names[i]);
      if (in_var->GetType() != proto::VarType::LOD_TENSOR &&
          in_var->GetType() != proto::VarType::LOD_TENSOR_ARRAY) {
        VLOG(3) << "input " << in << " is not LoDTensor or LoDTensorArray.";
        return;
      }
      out_var->SetLoDLevel(in_var->GetLoDLevel());
    }
  }

  void ShareLoD(const std::string &in, const std::string &out, size_t i = 0,
                size_t j = 0) const override {
    PADDLE_ENFORCE_LT(i, Inputs(in).size(),
                      platform::errors::InvalidArgument(
                          "The input variable index is out of range, expected "
                          "index less than %d, but received index is %d.",
                          Inputs(in).size(), i));
    PADDLE_ENFORCE_LT(j, Outputs(out).size(),
                      platform::errors::InvalidArgument(
                          "The output variable index is out of range, expected "
                          "index less than %d, but received index is %d.",
                          Outputs(out).size(), j));
    PADDLE_ENFORCE_NE(Inputs(in)[i], framework::kEmptyVarName,
                      platform::errors::InvalidArgument(
                          "The input variable %s[%d] is empty.", in, i));
    PADDLE_ENFORCE_NE(Outputs(out)[j], framework::kEmptyVarName,
                      platform::errors::InvalidArgument(
                          "The output variable %s[%d] is empty.", out, j));
    auto *in_var = block_.FindVarRecursive(Inputs(in)[i]);
    auto *out_var = block_.FindVarRecursive(Outputs(out)[j]);
    if (in_var->GetType() != proto::VarType::LOD_TENSOR &&
        in_var->GetType() != proto::VarType::LOD_TENSOR_ARRAY) {
      VLOG(3) << "input " << in << " is not LoDTensor or LoDTensorArray.";
      return;
    }
    out_var->SetLoDLevel(in_var->GetLoDLevel());
  }

  int32_t GetLoDLevel(const std::string &in, size_t i = 0) const override {
    PADDLE_ENFORCE_LT(i, Inputs(in).size(),
                      platform::errors::InvalidArgument(
                          "The input variable index is out of range, input "
                          "variable %s of operator %s only has %d elements.",
                          in, op_.Type(), Inputs(in).size()));
    PADDLE_ENFORCE_NE(Inputs(in)[i], framework::kEmptyVarName,
                      platform::errors::InvalidArgument(
                          "The input variable %s[%d] of operator %s is empty.",
                          in, i, op_.Type()));
    auto *in_var = block_.FindVarRecursive(Inputs(in)[i]);
    PADDLE_ENFORCE_NOT_NULL(
        in_var, platform::errors::NotFound(
                    "The input variable %s[%d] of operator %s is not found.",
                    in, i, op_.Type()));
    return in_var->GetLoDLevel();
  }

  void SetLoDLevel(const std::string &out, int32_t lod_level,
                   size_t j = 0) const override {
    PADDLE_ENFORCE_LT(j, Outputs(out).size(),
                      platform::errors::InvalidArgument(
                          "The output variable index is out of range, output "
                          "variable %s of operator %s only has %d elements.",
                          out, op_.Type(), Outputs(out).size()));
    PADDLE_ENFORCE_NE(Outputs(out)[j], framework::kEmptyVarName,
                      platform::errors::InvalidArgument(
                          "The output variable %s[%d] of operator %s is empty.",
                          out, j, op_.Type()));
    auto *out_var = block_.FindVarRecursive(Outputs(out)[j]);
    PADDLE_ENFORCE_NOT_NULL(
        out_var, platform::errors::NotFound(
                     "The output variable %s[%d] of operator %s is not found.",
                     out, j, op_.Type()));
    if (lod_level >= 0) {
      out_var->SetLoDLevel(lod_level);
    }
  }

  std::vector<InferShapeVarPtr> GetInputVarPtrs(
      const std::string &name) override {
    const std::vector<std::string> arg_names = Inputs(name);
    std::vector<InferShapeVarPtr> res;
    res.reserve(arg_names.size());
    std::transform(arg_names.begin(), arg_names.end(), std::back_inserter(res),
                   [this](const std::string &name) {
                     return block_.FindVarRecursive(name);
                   });
    return res;
  }

  std::vector<InferShapeVarPtr> GetOutputVarPtrs(
      const std::string &name) override {
    const std::vector<std::string> arg_names = Outputs(name);
    std::vector<InferShapeVarPtr> res;
    res.reserve(arg_names.size());
    std::transform(arg_names.begin(), arg_names.end(), std::back_inserter(res),
                   [this](const std::string &name) {
                     return block_.FindVarRecursive(name);
                   });
    return res;
  }

  DDim GetInputDim(const std::string &name) const override {
    const std::vector<std::string> &arg_names = Inputs(name);
    PADDLE_ENFORCE_EQ(arg_names.size(), 1UL,
                      platform::errors::InvalidArgument(
                          "The input(%s) should hold only one element, but now "
                          "it holds %d elements.",
                          name, arg_names.size()));
    return this->GetDim(arg_names[0]);
  }

  std::vector<DDim> GetInputsDim(const std::string &name) const override {
    const std::vector<std::string> &arg_names = Inputs(name);
    return GetDims(arg_names);
  }

  bool IsRuntime() const override;

  std::vector<proto::VarType::Type> GetInputsVarType(
      const std::string &name) const override {
    return GetVarTypes(Inputs(name));
  }

  std::vector<proto::VarType::Type> GetOutputsVarType(
      const std::string &name) const override {
    return GetVarTypes(Outputs(name));
  }

  void SetOutputDim(const std::string &name, const DDim &dim) override {
    auto arg_names = Outputs(name);
    PADDLE_ENFORCE_EQ(arg_names.size(), 1UL,
                      platform::errors::InvalidArgument(
                          "The iutput(%s) should hold only one element, but "
                          "now it holds %d elements.",
                          name, arg_names.size()));
    SetDim(arg_names[0], dim);
  }

  void SetOutputsDim(const std::string &name,
                     const std::vector<DDim> &dims) override {
    auto names = Outputs(name);
    SetDims(names, dims);
  }

 protected:
  std::vector<proto::VarType::Type> GetVarTypes(
      const std::vector<std::string> &names) const {
    std::vector<proto::VarType::Type> retv;
    retv.resize(names.size());
    std::transform(
        names.begin(), names.end(), retv.begin(),
        std::bind(std::mem_fn(&CompileTimeInferShapeContext::GetVarType), this,
                  std::placeholders::_1));
    return retv;
  }

  proto::VarType::Type GetVarType(const std::string &name) const;

  DDim GetDim(const std::string &name) const {
    auto var = block_.FindVarRecursive(name);
    PADDLE_ENFORCE_NOT_NULL(
        var, platform::errors::NotFound("Variable %s is not found.", name));
    DDim res;
    try {
      auto shape = var->GetShape();
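      // An empty shape vector is represented by the sentinel DDim [0] below.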
      res = shape.empty() ? make_ddim({0UL}) : make_ddim(shape);
    } catch (...) {
      VLOG(5) << "GetDim of variable " << name << " error";
      std::rethrow_exception(std::current_exception());
    }
    return res;
  }

  std::vector<DDim> GetDims(const std::vector<std::string> &names) const {
    std::vector<DDim> ret;
    ret.reserve(names.size());
    std::transform(
        names.begin(), names.end(), std::back_inserter(ret),
        [this](const std::string &name) { return this->GetDim(name); });
    return ret;
  }

  void SetDim(const std::string &name, const DDim &dim);

  void SetDims(const std::vector<std::string> &names,
               const std::vector<DDim> &dims) {
    size_t length = names.size();
    PADDLE_ENFORCE_EQ(length, dims.size(),
                      platform::errors::InvalidArgument(
                          "The input variables number(%d) and input dimensions "
                          "number(%d) do not match.",
                          length, dims.size()));
    for (size_t i = 0; i < length; ++i) {
      if (names[i] == framework::kEmptyVarName) {
        continue;
      }
      SetDim(names[i], dims[i]);
    }
  }

  std::vector<DDim> GetRepeatedDims(const std::string &name) const override;

  void SetRepeatedDims(const std::string &name,
                       const std::vector<DDim> &dims) override;

  const OpDesc &op_;
  const BlockDesc &block_;
};

OpDesc::OpDesc(const std::string &type, const VariableNameMap &inputs,
               const VariableNameMap &outputs, const AttributeMap &attrs) {
  desc_.set_type(type);
  inputs_ = inputs;
  outputs_ = outputs;
  attrs_ = attrs;
  need_update_ = true;
  block_ = nullptr;
}
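
// A minimal compile-time usage sketch (hypothetical op and attribute names):
//   OpDesc op("mul", {{"X", {"x0"}}, {"Y", {"y0"}}}, {{"Out", {"out0"}}},
//             {{"x_num_col_dims", 1}});
//   op.CheckAttrs();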

OpDesc::OpDesc(const OpDesc &other, BlockDesc *block) {
  CopyFrom(other);
  block_ = block;
  need_update_ = true;
}

void OpDesc::CopyFrom(const OpDesc &op_desc) {
  desc_.set_type(op_desc.Type());
  inputs_ = op_desc.inputs_;
  outputs_ = op_desc.outputs_;
  attrs_ = op_desc.attrs_;
  need_update_ = true;
  // When creating a graph from a program, the creation of an op node will
  // create a new OpDesc instead of referring to the original one. To find
  // the original OpDesc of the op node, the id has to be copied to the new
  // OpDesc. The var node has the same situation, but the default copy
  // constructor can copy the id automatically.
  id_ = op_desc.id_;
}

OpDesc::OpDesc(const proto::OpDesc &desc, BlockDesc *block)
    : desc_(desc), need_update_(false) {
  // restore inputs_
  int input_size = desc_.inputs_size();
  for (int i = 0; i < input_size; ++i) {
    const proto::OpDesc::Var &var = desc_.inputs(i);
    std::vector<std::string> &args = inputs_[var.parameter()];
    int argu_size = var.arguments_size();
    args.reserve(argu_size);
    for (int j = 0; j < argu_size; ++j) {
      args.push_back(var.arguments(j));
    }
  }
  // restore outputs_
  int output_size = desc_.outputs_size();
  for (int i = 0; i < output_size; ++i) {
    const proto::OpDesc::Var &var = desc_.outputs(i);
    std::vector<std::string> &args = outputs_[var.parameter()];
    int argu_size = var.arguments_size();
    args.reserve(argu_size);
    for (int j = 0; j < argu_size; ++j) {
      args.push_back(var.arguments(j));
    }
  }
  // restore attrs_
  for (const proto::OpDesc::Attr &attr : desc_.attrs()) {
    std::string attr_name = attr.name();
    // The sub-block referred to by a BLOCK attr hasn't been added to the
    // ProgramDesc yet, so we skip setting BLOCK/BLOCKS attrs here.
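    // (The block attrs are expected to be restored later, once all blocks
    // of the program have been constructed.)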
    if (attr.type() != proto::AttrType::BLOCK &&
        attr.type() != proto::AttrType::BLOCKS) {
      attrs_[attr_name] = GetAttrValue(attr);
    }
  }
  this->block_ = block;
}

proto::OpDesc *OpDesc::Proto() {
  Flush();
  return &desc_;
}

const std::vector<std::string> &OpDesc::Input(const std::string &name) const {
  auto it = inputs_.find(name);
  PADDLE_ENFORCE_NE(
      it, inputs_.end(),
      platform::errors::NotFound("Input %s cannot be found in operator %s.",
                                 name, Type()));
  return it->second;
}

std::vector<std::string> OpDesc::InputArgumentNames() const {
  std::vector<std::string> retv;
  for (auto &ipt : this->inputs_) {
    retv.insert(retv.end(), ipt.second.begin(), ipt.second.end());
  }
  return retv;
}

void OpDesc::SetInput(const std::string &param_name,
                      const std::vector<std::string> &args) {
  need_update_ = true;
  inputs_[param_name] = args;
}

const std::vector<std::string> &OpDesc::Output(const std::string &name) const {
  auto it = outputs_.find(name);
  PADDLE_ENFORCE_NE(
      it, outputs_.end(),
      platform::errors::NotFound("Output %s cannot be found in operator %s.",
                                 name, Type()));
  return it->second;
}

bool OpDesc::HasOutput(const std::string &name) const {
  return outputs_.find(name) != outputs_.end();
}

std::vector<std::string> OpDesc::OutputArgumentNames() const {
  std::vector<std::string> retv;
  for (auto &ipt : this->outputs_) {
    retv.insert(retv.end(), ipt.second.begin(), ipt.second.end());
  }
  return retv;
}

Y
Yu Yang 已提交
452 453
void OpDesc::SetOutput(const std::string &param_name,
                       const std::vector<std::string> &args) {
  need_update_ = true;
  this->outputs_[param_name] = args;
}

void OpDesc::RemoveOutput(const std::string &name) {
  outputs_.erase(name);
  need_update_ = true;
}

void OpDesc::RemoveInput(const std::string &name) {
  inputs_.erase(name);
  need_update_ = true;
}

bool OpDesc::HasProtoAttr(const std::string &name) const {
  auto &op_info = OpInfoMap::Instance();
  if (op_info.Has(desc_.type())) {
    auto op_info_ptr = op_info.Get(desc_.type());
    if (op_info_ptr.HasOpProtoAndChecker()) {
      const proto::OpProto &proto = op_info_ptr.Proto();
      for (int i = 0; i != proto.attrs_size(); ++i) {
        const proto::OpProto::Attr &attr = proto.attrs(i);
        if (attr.name() == name) {
          return true;
        }
      }
    }
  }
  return false;
}

proto::AttrType OpDesc::GetAttrType(const std::string &name) const {
  auto it = attrs_.find(name);
  PADDLE_ENFORCE_NE(it, attrs_.end(), platform::errors::NotFound(
                                          "Attribute %s is not found.", name));
  return static_cast<proto::AttrType>(it->second.which() - 1);
}

std::vector<std::string> OpDesc::AttrNames() const {
  std::vector<std::string> retv;
  retv.reserve(attrs_.size());
  for (auto &attr : attrs_) {
    retv.push_back(attr.first);
  }
  return retv;
}

void OpDesc::RemoveAttr(const std::string &name) {
  attrs_.erase(name);
  need_update_ = true;
}

void OpDesc::SetAttr(const std::string &name, const Attribute &v) {
  // NOTICE(minqiyang): pybind11 passes an empty list in Python to C++ as
  // the std::vector<int> type, so we have to correct the attr's type here
  // when that happens.
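  // For example, a hypothetical Python-side call op._set_attr("flags", [])
  // arrives here as an empty std::vector<int> (INTS); if the op proto
  // declares "flags" as BOOLEANS, the switch below resets the attribute to
  // an empty std::vector<bool>.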
  proto::AttrType attr_type = static_cast<proto::AttrType>(v.which() - 1);
  if (attr_type == proto::AttrType::INTS &&
      BOOST_GET_CONST(std::vector<int>, v).size() == 0u) {
    // Find current attr via attr name and set the correct attribute value
    const proto::OpProto::Attr &attr = GetProtoAttr(name);
    switch (attr.type()) {
      case proto::AttrType::BOOLEANS: {
        VLOG(11) << "SetAttr: " << Type() << ", " << name
                 << " from INTS to BOOLEANS";
        this->attrs_[name] = std::vector<bool>();
        break;
      }
      case proto::AttrType::INTS: {
        VLOG(11) << "SetAttr: " << Type() << ", " << name
                 << " from INTS to INTS";
        this->attrs_[name] = std::vector<int>();
        break;
      }
      case proto::AttrType::LONGS: {
        VLOG(11) << "SetAttr: " << Type() << ", " << name
                 << " from LONGS to LONGS";
        this->attrs_[name] = std::vector<int64_t>();
        break;
      }
      case proto::AttrType::FLOATS: {
        VLOG(11) << "SetAttr: " << Type() << ", " << name
                 << " from INTS to FLOATS";
        this->attrs_[name] = std::vector<float>();
        break;
      }
      case proto::AttrType::STRINGS: {
        VLOG(11) << "SetAttr: " << Type() << ", " << name
                 << " from INTS to STRINGS";
        this->attrs_[name] = std::vector<std::string>();
        break;
      }
      case proto::AttrType::BLOCKS: {
        VLOG(11) << "SetAttr: " << Type() << ", " << name
                 << " from INTS to BLOCKS";
        this->SetBlocksAttr(name, std::vector<BlockDesc *>());
        return;
      }
      default:
        PADDLE_THROW(platform::errors::Unimplemented(
            "Unsupported attribute type (code %d).", attr.type()));
    }
    need_update_ = true;
    return;
  }

  // In order to set bool attr properly
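  // (a Python-side bool can arrive here as an INT, so if the proto declares
  // the attribute as BOOLEAN, cast the int back to bool before storing it)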
  if (attr_type == proto::AttrType::INT && HasProtoAttr(name) &&
      GetProtoAttr(name).type() == proto::AttrType::BOOLEAN) {
    this->attrs_[name] = static_cast<bool>(BOOST_GET_CONST(int, v));
    need_update_ = true;
    return;
  }

  this->attrs_[name] = v;
  need_update_ = true;
}

void OpDesc::SetBlockAttr(const std::string &name, BlockDesc *block) {
  this->attrs_[name] = block;
  need_update_ = true;
}

void OpDesc::SetBlocksAttr(const std::string &name,
                           std::vector<BlockDesc *> blocks) {
  this->attrs_[name] = blocks;
  need_update_ = true;
}

void OpDesc::SetAttrMap(
    const std::unordered_map<std::string, Attribute> &attr_map) {
  attrs_ = attr_map;
  need_update_ = true;
}

Attribute OpDesc::GetAttr(const std::string &name) const {
  auto it = attrs_.find(name);
  PADDLE_ENFORCE_NE(it, attrs_.end(), platform::errors::NotFound(
                                          "Attribute %s is not found.", name));
  return it->second;
}

const proto::OpProto::Attr &OpDesc::GetProtoAttr(
    const std::string &name) const {
  const proto::OpProto &proto = OpInfoMap::Instance().Get(Type()).Proto();
  for (int i = 0; i != proto.attrs_size(); ++i) {
    const proto::OpProto::Attr &attr = proto.attrs(i);
    if (attr.name() == name) {
      return attr;
    }
  }

  PADDLE_THROW(platform::errors::NotFound(
      "Attribute %s is not found in proto %s.", name, proto.type()));
}

Attribute OpDesc::GetNullableAttr(const std::string &name) const {
  auto it = attrs_.find(name);
  if (it != attrs_.end()) {
    return it->second;
  } else {
    return Attribute();
  }
}

std::vector<int> OpDesc::GetBlocksAttrIds(const std::string &name) const {
  auto it = attrs_.find(name);
  PADDLE_ENFORCE_NE(
      it, attrs_.end(),
      platform::errors::NotFound(
          "Attribute `%s` is not found in operator `%s`.", name, desc_.type()));
  auto blocks = BOOST_GET_CONST(std::vector<BlockDesc *>, it->second);

  std::vector<int> ids;
  for (auto n : blocks) {
    ids.push_back(n->ID());
  }

  return ids;
}

int OpDesc::GetBlockAttrId(const std::string &name) const {
  auto it = attrs_.find(name);
  PADDLE_ENFORCE_NE(
      it, attrs_.end(),
      platform::errors::NotFound(
          "Attribute `%s` is not found in operator `%s`.", name, desc_.type()));
  return BOOST_GET_CONST(BlockDesc *, it->second)->ID();
}

const std::unordered_map<std::string, Attribute> &OpDesc::GetAttrMap() const {
  return attrs_;
}

void OpDesc::Rename(const std::string &old_name, const std::string &new_name) {
  RenameInput(old_name, new_name);
  RenameOutput(old_name, new_name);
  need_update_ = true;
}

void OpDesc::RenameOutput(const std::string &old_name,
                          const std::string &new_name) {
  for (auto &output : outputs_) {
    std::replace(output.second.begin(), output.second.end(), old_name,
                 new_name);
  }

  auto it = attrs_.find(framework::OpProtoAndCheckerMaker::OpRoleVarAttrName());
  if (it != attrs_.end()) {
    auto &op_vars = BOOST_GET(std::vector<std::string>, it->second);
    std::replace(op_vars.begin(), op_vars.end(), old_name, new_name);
  }

  need_update_ = true;
}

void OpDesc::RenameInput(const std::string &old_name,
                         const std::string &new_name) {
  for (auto &input : inputs_) {
    std::replace(input.second.begin(), input.second.end(), old_name, new_name);
  }

  auto it = attrs_.find(framework::OpProtoAndCheckerMaker::OpRoleVarAttrName());
  if (it != attrs_.end()) {
    auto &op_vars = BOOST_GET(std::vector<std::string>, it->second);
    std::replace(op_vars.begin(), op_vars.end(), old_name, new_name);
  }

  need_update_ = true;
}

struct SetAttrDescVisitor : public boost::static_visitor<void> {
  explicit SetAttrDescVisitor(proto::OpDesc::Attr *attr) : attr_(attr) {}
  mutable proto::OpDesc::Attr *attr_;
  void operator()(int v) const { attr_->set_i(v); }
  void operator()(float v) const { attr_->set_f(v); }
  void operator()(const std::string &v) const { attr_->set_s(v); }

  // Please refer to https://github.com/PaddlePaddle/Paddle/issues/7162
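  // The enable_if restricts this overload to exactly `bool`, so it cannot be
  // selected for other variant alternatives through implicit conversion.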
  template <class T,
            class = typename std::enable_if<std::is_same<bool, T>::value>::type>
  void operator()(T b) const {
    attr_->set_b(b);
  }

  void operator()(const std::vector<int> &v) const {
    VectorToRepeated(v, attr_->mutable_ints());
  }
  void operator()(const std::vector<float> &v) const {
    VectorToRepeated(v, attr_->mutable_floats());
  }
  void operator()(const std::vector<std::string> &v) const {
    VectorToRepeated(v, attr_->mutable_strings());
  }
  void operator()(const std::vector<bool> &v) const {
    VectorToRepeated(v, attr_->mutable_bools());
  }
  void operator()(const std::vector<BlockDesc *> &v) const {
    std::vector<int> blocks_idx;
    for (auto blk : v) {
      blocks_idx.push_back(blk->ID());
    }
    VectorToRepeated(blocks_idx, attr_->mutable_blocks_idx());
  }

  void operator()(BlockDesc *desc) const { attr_->set_block_idx(desc->ID()); }

  void operator()(int64_t v) const { attr_->set_l(v); }

  void operator()(const std::vector<int64_t> &v) const {
    VectorToRepeated(v, attr_->mutable_longs());
  }

  void operator()(const std::vector<double> &v) const {
    VectorToRepeated(v, attr_->mutable_float64s());
  }

731 732 733 734 735
  void operator()(boost::blank) const {
    PADDLE_THROW(platform::errors::Unavailable(
        "Unsupported calling method of SetAttrDescVisitor object for "
        "`boosst::blank` type."));
  }
};

void OpDesc::Flush() {
  if (need_update_) {
    this->desc_.mutable_inputs()->Clear();
    for (auto &ipt : inputs_) {
      auto *input = desc_.add_inputs();
      input->set_parameter(ipt.first);
      VectorToRepeated(ipt.second, input->mutable_arguments());
    }

    this->desc_.mutable_outputs()->Clear();
    for (auto &opt : outputs_) {
      auto *output = desc_.add_outputs();
      output->set_parameter(opt.first);
      VectorToRepeated(opt.second, output->mutable_arguments());
    }

    this->desc_.mutable_attrs()->Clear();
    for (auto &attr : attrs_) {
      auto *attr_desc = desc_.add_attrs();
      attr_desc->set_name(attr.first);
      attr_desc->set_type(
          static_cast<proto::AttrType>(attr.second.which() - 1));
      SetAttrDescVisitor visitor(attr_desc);
      boost::apply_visitor(visitor, attr.second);
    }

    need_update_ = false;
  }
}

void OpDesc::CheckAttrs() {
  PADDLE_ENFORCE_EQ(Type().empty(), false,
                    platform::errors::PreconditionNotMet(
                        "CheckAttrs() can not be called before type is set."));
  auto *checker = OpInfoMap::Instance().Get(Type()).Checker();
  if (checker == nullptr) {
    // checker is not configured. That operator could be generated by Paddle,
    // not by users.
    return;
  }
  VLOG(10) << "begin to check attribute of " << Type();
  checker->Check(&attrs_);
}

void OpDesc::InferShape(const BlockDesc &block) const {
  try {
    VLOG(3) << "CompileTime infer shape on " << Type();
    auto &infer_shape = OpInfoMap::Instance().Get(this->Type()).infer_shape_;
    PADDLE_ENFORCE_EQ(
        static_cast<bool>(infer_shape), true,
        platform::errors::NotFound(
            "Operator %s's infer_shape is not registered.", this->Type()));
    CompileTimeInferShapeContext ctx(*this, block);
    if (VLOG_IS_ON(10)) {
      std::ostringstream sout;
      auto inames = this->InputArgumentNames();
      sout << " From [";
      std::copy(inames.begin(), inames.end(),
                std::ostream_iterator<std::string>(sout, ", "));
      sout << "] to [";
      auto onames = this->OutputArgumentNames();
      std::copy(onames.begin(), onames.end(),
                std::ostream_iterator<std::string>(sout, ", "));
      sout << "]";
      VLOG(10) << sout.str();
    }
    infer_shape(&ctx);
  } catch (platform::EnforceNotMet &exception) {
    framework::AppendErrorOpHint(Type(), &exception);
    throw std::move(exception);
  } catch (...) {
    std::rethrow_exception(std::current_exception());
  }
}

void OpDesc::InferVarType(BlockDesc *block) const {
  // There are a few places where the var type can be set.
  // When a VarDesc is created, it is set to LOD_TENSOR by default.
  // When an output variable is created, it is set to LOD_TENSOR by default.
  // We restrict this to be the only place where an operator defines its
  // customized var type inference. Hence, we don't do any "default" setting
  // here.
  auto &info = OpInfoMap::Instance().Get(this->Type());
  if (info.infer_var_type_) {
    InferVarTypeContext context(this, block);
    info.infer_var_type_(&context);
  }
}

CompileTimeInferShapeContext::CompileTimeInferShapeContext(
    const OpDesc &op, const BlockDesc &block)
    : op_(op), block_(block) {}

bool CompileTimeInferShapeContext::HasInput(const std::string &name) const {
  if (op_.Inputs().find(name) == op_.Inputs().end()) {
    return false;
  }
  const std::vector<std::string> &input_names = op_.Input(name);
  auto length = input_names.size();
  if (length == 0) {
    return false;
  }
  PADDLE_ENFORCE_EQ(length, 1UL, platform::errors::InvalidArgument(
                                     "Input(%s) should have only one value, "
                                     "but it has %d values now.",
                                     name, length));
  return block_.HasVarRecursive(input_names[0]);
}

bool CompileTimeInferShapeContext::HasOutput(const std::string &name) const {
  if (op_.Outputs().find(name) == op_.Outputs().end()) {
    return false;
  }
  const std::vector<std::string> &output_names = op_.Output(name);
  auto length = output_names.size();
  if (length == 0) {
    return false;
  }
  PADDLE_ENFORCE_EQ(length, 1UL, platform::errors::InvalidArgument(
                                     "Output(%s) should have only one value, "
                                     "but it has %d values now.",
                                     name, length));
  return block_.HasVarRecursive(output_names[0]);
}

bool CompileTimeInferShapeContext::HasInputs(const std::string &name) const {
  if (op_.Inputs().find(name) == op_.Inputs().end()) {
    return false;
  }
  const std::vector<std::string> &input_names = op_.Input(name);
  if (input_names.empty()) {
    return false;
  }
  for (auto &input : input_names) {
    if (!block_.HasVarRecursive(input)) return false;
  }
  return true;
}

bool CompileTimeInferShapeContext::HasOutputs(const std::string &name) const {
  if (op_.Outputs().find(name) == op_.Outputs().end()) {
    return false;
  }
  const std::vector<std::string> &output_names = op_.Output(name);
  if (output_names.empty()) {
    return false;
  }
  for (auto &output : output_names) {
    if (!block_.HasVarRecursive(output)) return false;
  }
  return true;
}

AttrReader CompileTimeInferShapeContext::Attrs() const {
  return AttrReader(op_.GetAttrMap());
}

std::vector<std::string> CompileTimeInferShapeContext::Inputs(
    const std::string &name) const {
  return op_.Input(name);
}

std::vector<std::string> CompileTimeInferShapeContext::Outputs(
    const std::string &name) const {
  return op_.Output(name);
}

std::vector<DDim> CompileTimeInferShapeContext::GetRepeatedDims(
    const std::string &name) const {
  auto var = block_.FindVarRecursive(name);
  PADDLE_ENFORCE_NOT_NULL(
      var, platform::errors::NotFound("Variable %s is not found.", name));
  std::vector<DDim> res;
  try {
    auto shapes = var->GetShapes();
    for (const auto &s : shapes) {
      res.push_back(s.empty() ? make_ddim({0UL}) : make_ddim(s));
    }
  } catch (...) {
    VLOG(5) << "GetRepeatedDim of variable " << name << " error.";
    std::rethrow_exception(std::current_exception());
  }
  return res;
}

void CompileTimeInferShapeContext::SetDim(const std::string &name,
                                          const DDim &dim) {
  block_.FindVarRecursive(name)->SetShape(vectorize(dim));
}

void CompileTimeInferShapeContext::SetRepeatedDims(
    const std::string &name, const std::vector<DDim> &dims) {
  auto var = block_.FindVarRecursive(name);
  PADDLE_ENFORCE_NOT_NULL(
      var, platform::errors::NotFound("Variable %s is not found.", name));
  std::vector<std::vector<int64_t>> dim_vec(dims.size());
  std::transform(dims.begin(), dims.end(), dim_vec.begin(), vectorize<>);
  var->SetShapes(dim_vec);
}

bool CompileTimeInferShapeContext::IsRuntime() const { return false; }

proto::VarType::Type CompileTimeInferShapeContext::GetVarType(
    const std::string &name) const {
  return block_.FindVarRecursive(name)->GetType();
}

}  // namespace framework
}  // namespace paddle