/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/framework/op_desc.h"

#include <string>

#include "glog/logging.h"
#include "paddle/fluid/framework/block_desc.h"
#include "paddle/fluid/framework/op_call_stack.h"
#include "paddle/fluid/framework/op_proto_maker.h"
#include "paddle/fluid/framework/operator.h"
#include "paddle/fluid/framework/shape_inference.h"
#include "paddle/fluid/framework/var_type_inference.h"

namespace paddle {
namespace framework {

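// CompileTimeInferShapeContext answers shape and type queries against the
// VarDescs stored in a BlockDesc, so InferShape can run at compile time,
// before any tensors exist.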
class CompileTimeInferShapeContext : public InferShapeContext {
 public:
  CompileTimeInferShapeContext(const OpDesc &op, const BlockDesc &block);

  bool HasInput(const std::string &name) const override;

  bool HasOutput(const std::string &name) const override;

  bool HasAttr(const std::string &name) const override;

  bool HasInputs(const std::string &name) const override;

  bool HasOutputs(const std::string &name,
                  bool allow_null = false) const override;

  AttrReader Attrs() const override;

  std::vector<std::string> Inputs(const std::string &name) const override;

  std::vector<std::string> Outputs(const std::string &name) const override;

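  // Map positional indices to the parameter names declared in the operator's
  // registered OpProto.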
  std::string GetInputNameByIdx(size_t idx) const override {
    auto &op_proto =
        paddle::framework::OpInfoMap::Instance().Get(op_.Type()).proto_;
    PADDLE_ENFORCE_LT(idx, op_proto->inputs().size(),
                      platform::errors::OutOfRange(
                          "The index should be less than the size of inputs of "
                          "operator %s, but got index is %d and size is %d",
                          op_.Type(), idx, op_proto->inputs().size()));
    return op_proto->inputs()[idx].name();
  }

  std::string GetOutputNameByIdx(size_t idx) const override {
    auto &op_proto =
        paddle::framework::OpInfoMap::Instance().Get(op_.Type()).proto_;
    PADDLE_ENFORCE_LT(
        idx, op_proto->outputs().size(),
        platform::errors::OutOfRange(
            "The index should be less than the size of outputs of "
            "operator %s, but got index is %d and size is %d",
            op_.Type(), idx, op_proto->outputs().size()));
    return op_proto->outputs()[idx].name();
  }

  void ShareDim(const std::string &in, const std::string &out, size_t i = 0,
                size_t j = 0) override {
    PADDLE_ENFORCE_LT(i, Inputs(in).size(),
                      platform::errors::InvalidArgument(
                          "The input variable index is out of range, expected "
                          "index less than %d, but received index is %d.",
                          Inputs(in).size(), i));
    PADDLE_ENFORCE_LT(j, Outputs(out).size(),
                      platform::errors::InvalidArgument(
                          "The output variable index is out of range, expected "
                          "index less than %d, but received index is %d.",
                          Outputs(out).size(), j));

    std::string input_n = Inputs(in)[i];
    std::string output_n = Outputs(out)[j];

    PADDLE_ENFORCE_NE(input_n, framework::kEmptyVarName,
                      platform::errors::InvalidArgument(
                          "The input variable %s[%d] is empty.", in, i));
    PADDLE_ENFORCE_NE(output_n, framework::kEmptyVarName,
                      platform::errors::InvalidArgument(
                          "The output variable %s[%d] is empty.", out, j));

    auto *in_var = block_.FindVarRecursive(input_n);
    auto *out_var = block_.FindVarRecursive(output_n);

    PADDLE_ENFORCE_EQ(
        in_var->GetType(), out_var->GetType(),
        platform::errors::InvalidArgument(
            "The type of input %s and output %s do not match. The input type "
            "is %s, output type is %s.",
            input_n, output_n, DataTypeToString(in_var->GetType()),
            DataTypeToString(out_var->GetType())));

    SetDim(output_n, GetDim(input_n));
  }

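  // Copy the LoD level of every input variable in slot `in` to the
  // corresponding output variable in slot `out`.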
  void ShareAllLoD(const std::string &in,
                   const std::string &out) const override {
    auto &in_var_names = op_.Input(in);
    auto &out_var_names = op_.Output(out);

    PADDLE_ENFORCE_EQ(
        in_var_names.size(), out_var_names.size(),
        platform::errors::PreconditionNotMet(
            "Op [%s]: the number of input variables should be equal to the "
            "number of output variables",
            op_.Type()));

    for (size_t i = 0; i < in_var_names.size(); ++i) {
      if (out_var_names[i] == framework::kEmptyVarName) {
        continue;
      }

      auto *in_var = block_.FindVarRecursive(in_var_names[i]);
      auto *out_var = block_.FindVarRecursive(out_var_names[i]);
      if (in_var->GetType() != proto::VarType::LOD_TENSOR &&
          in_var->GetType() != proto::VarType::LOD_TENSOR_ARRAY) {
        VLOG(3) << "input " << in << " is not LoDTensor or LoDTensorArray.";
        return;
      }
      out_var->SetLoDLevel(in_var->GetLoDLevel());
    }
  }

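  // Copy the LoD level of input `in`[i] to output `out`[j]; only LoDTensor and
  // LoDTensorArray variables carry a LoD level.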
  void ShareLoD(const std::string &in, const std::string &out, size_t i = 0,
                size_t j = 0) const override {
    PADDLE_ENFORCE_LT(i, Inputs(in).size(),
                      platform::errors::InvalidArgument(
                          "The input variable index is out of range, expected "
                          "index less than %d, but received index is %d.",
                          Inputs(in).size(), i));
    PADDLE_ENFORCE_LT(j, Outputs(out).size(),
                      platform::errors::InvalidArgument(
                          "The output variable index is out of range, expected "
                          "index less than %d, but received index is %d.",
                          Outputs(out).size(), j));
    PADDLE_ENFORCE_NE(Inputs(in)[i], framework::kEmptyVarName,
                      platform::errors::InvalidArgument(
                          "The input variable %s[%d] is empty.", in, i));
    PADDLE_ENFORCE_NE(Outputs(out)[j], framework::kEmptyVarName,
                      platform::errors::InvalidArgument(
                          "The output variable %s[%d] is empty.", out, j));
    auto *in_var = block_.FindVarRecursive(Inputs(in)[i]);
    auto *out_var = block_.FindVarRecursive(Outputs(out)[j]);
    if (in_var->GetType() != proto::VarType::LOD_TENSOR &&
        in_var->GetType() != proto::VarType::LOD_TENSOR_ARRAY) {
      VLOG(3) << "input " << in << " is not LoDTensor or LoDTensorArray.";
      return;
    }
    out_var->SetLoDLevel(in_var->GetLoDLevel());
  }

  int32_t GetLoDLevel(const std::string &in, size_t i = 0) const override {
    PADDLE_ENFORCE_LT(i, Inputs(in).size(),
                      platform::errors::InvalidArgument(
                          "The input variable index is out of range, input "
                          "variable %s of operator %s only has %d elements.",
                          in, op_.Type(), Inputs(in).size()));
    PADDLE_ENFORCE_NE(Inputs(in)[i], framework::kEmptyVarName,
                      platform::errors::InvalidArgument(
                          "The input variable %s[%d] of operator %s is empty.",
                          in, i, op_.Type()));
    auto *in_var = block_.FindVarRecursive(Inputs(in)[i]);
    PADDLE_ENFORCE_NOT_NULL(
        in_var, platform::errors::NotFound(
                    "The input variable %s[%d] of operator %s is not found.",
                    in, i, op_.Type()));
    return in_var->GetLoDLevel();
  }

  void SetLoDLevel(const std::string &out, int32_t lod_level,
                   size_t j = 0) const override {
    PADDLE_ENFORCE_LT(j, Outputs(out).size(),
                      platform::errors::InvalidArgument(
                          "The output variable index is out of range, output "
                          "variable %s of operator %s only has %d elements.",
                          out, op_.Type(), Outputs(out).size()));
    PADDLE_ENFORCE_NE(Outputs(out)[j], framework::kEmptyVarName,
                      platform::errors::InvalidArgument(
                          "The output variable %s[%d] of operator %s is empty.",
                          out, j, op_.Type()));
    auto *out_var = block_.FindVarRecursive(Outputs(out)[j]);
    PADDLE_ENFORCE_NOT_NULL(
        out_var, platform::errors::NotFound(
                     "The output variable %s[%d] of operator %s is not found.",
                     out, j, op_.Type()));
    if (lod_level >= 0) {
      out_var->SetLoDLevel(lod_level);
    }
  }

  paddle::SmallVector<InferShapeVarPtr, phi::kInputSmallVectorSize>
  GetInputVarPtrs(const std::string &name) const override {
    const std::vector<std::string> arg_names = Inputs(name);
    paddle::SmallVector<InferShapeVarPtr, phi::kInputSmallVectorSize> res;
    res.reserve(arg_names.size());
    std::transform(arg_names.begin(), arg_names.end(), std::back_inserter(res),
                   [this](const std::string &name) {
                     return block_.FindVarRecursive(name);
                   });
    return res;
  }

  paddle::SmallVector<InferShapeVarPtr, phi::kOutputSmallVectorSize>
  GetOutputVarPtrs(const std::string &name) const override {
    const std::vector<std::string> arg_names = Outputs(name);
    paddle::SmallVector<InferShapeVarPtr, phi::kOutputSmallVectorSize> res;
    res.reserve(arg_names.size());
    std::transform(arg_names.begin(), arg_names.end(), std::back_inserter(res),
                   [this](const std::string &name) {
                     return block_.FindVarRecursive(name);
                   });
    return res;
  }

  DDim GetInputDim(const std::string &name) const override {
    const std::vector<std::string> &arg_names = Inputs(name);
    PADDLE_ENFORCE_EQ(arg_names.size(), 1UL,
                      platform::errors::InvalidArgument(
                          "The input(%s) should hold only one element, but now "
                          "it holds %d elements.",
                          name, arg_names.size()));
    return this->GetDim(arg_names[0]);
  }

  std::vector<DDim> GetInputsDim(const std::string &name) const override {
    const std::vector<std::string> &arg_names = Inputs(name);
    return GetDims(arg_names);
  }

  bool IsRuntime() const override;

  bool IsRunMKLDNNKernel() const override;

  proto::VarType::Type GetInputVarType(const std::string &name) const override {
    return GetVarType(Inputs(name).at(0));
  }

  std::vector<proto::VarType::Type> GetInputsVarType(
      const std::string &name) const override {
    return GetVarTypes(Inputs(name));
  }

  std::vector<proto::VarType::Type> GetOutputsVarType(
      const std::string &name) const override {
    return GetVarTypes(Outputs(name));
  }

  void SetOutputDim(const std::string &name, const DDim &dim) override {
    auto arg_names = Outputs(name);
    PADDLE_ENFORCE_EQ(arg_names.size(), 1UL,
                      platform::errors::InvalidArgument(
                          "The output(%s) should hold only one element, but "
                          "now it holds %d elements.",
                          name, arg_names.size()));
    SetDim(arg_names[0], dim);
  }

  void SetOutputsDim(const std::string &name,
                     const std::vector<DDim> &dims) override {
    auto names = Outputs(name);
    SetDims(names, dims);
  }

  const phi::ArgumentMappingFn *GetPhiArgumentMappingFn() const override {
    return phi::OpUtilsMap::Instance().GetArgumentMappingFn(op_.Type());
  }

  const phi::KernelSignature *GetPhiDefaultKernelSignature() const override {
    return &phi::DefaultKernelSignatureMap::Instance().Get(op_.Type());
  }

 protected:
  std::vector<proto::VarType::Type> GetVarTypes(
      const std::vector<std::string> &names) const {
    std::vector<proto::VarType::Type> retv;
    retv.resize(names.size());
    std::transform(
        names.begin(), names.end(), retv.begin(),
        std::bind(std::mem_fn(&CompileTimeInferShapeContext::GetVarType), this,
                  std::placeholders::_1));
    return retv;
  }

  proto::VarType::Type GetVarType(const std::string &name) const;

  DDim GetDim(const std::string &name) const {
    auto var = block_.FindVarRecursive(name);
    PADDLE_ENFORCE_NOT_NULL(
        var, platform::errors::NotFound("Variable %s is not found.", name));
    DDim res;
    try {
      auto shape = var->GetShape();
      res = shape.empty() ? phi::make_ddim({0UL}) : phi::make_ddim(shape);
    } catch (...) {
      VLOG(5) << "GetDim of variable " << name << " error";
      std::rethrow_exception(std::current_exception());
    }
    return res;
  }

  std::vector<DDim> GetDims(const std::vector<std::string> &names) const {
    std::vector<DDim> ret;
    ret.reserve(names.size());
    std::transform(
        names.begin(), names.end(), std::back_inserter(ret),
        [this](const std::string &name) { return this->GetDim(name); });
    return ret;
  }

  void SetDim(const std::string &name, const DDim &dim);

  void SetDims(const std::vector<std::string> &names,
               const std::vector<DDim> &dims) {
    size_t length = names.size();
329 330 331 332 333
    PADDLE_ENFORCE_EQ(length, dims.size(),
                      platform::errors::InvalidArgument(
                          "The input variables number(%d) and input dimensions "
                          "number(%d) do not match.",
                          length, dims.size()));
    for (size_t i = 0; i < length; ++i) {
      if (names[i] == framework::kEmptyVarName) {
        continue;
      }
      SetDim(names[i], dims[i]);
    }
  }

  std::vector<DDim> GetRepeatedDims(const std::string &name) const override;

  void SetRepeatedDims(const std::string &name,
                       const std::vector<DDim> &dims) override;

  const OpDesc &op_;
  const BlockDesc &block_;
};

OpDesc::OpDesc(const std::string &type, const VariableNameMap &inputs,
               const VariableNameMap &outputs, const AttributeMap &attrs) {
  desc_.set_type(type);
  inputs_ = inputs;
  outputs_ = outputs;
  attrs_ = attrs;
  need_update_ = true;
  block_ = nullptr;
}

OpDesc::OpDesc(const OpDesc &other, BlockDesc *block) {
  CopyFrom(other);
  block_ = block;
  need_update_ = true;
}

void OpDesc::CopyFrom(const OpDesc &op_desc) {
  desc_.set_type(op_desc.Type());
  inputs_ = op_desc.inputs_;
  outputs_ = op_desc.outputs_;
  attrs_ = op_desc.attrs_;
  // The record of original_id_ is only for auto parallel.
  original_id_ = op_desc.original_id_;
  need_update_ = true;
}

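// Rebuild inputs_, outputs_ and attrs_ from a serialized proto::OpDesc.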
OpDesc::OpDesc(const proto::OpDesc &desc, BlockDesc *block)
    : desc_(desc), need_update_(false) {
  // restore inputs_
  int input_size = desc_.inputs_size();
  for (int i = 0; i < input_size; ++i) {
    const proto::OpDesc::Var &var = desc_.inputs(i);
    std::vector<std::string> &args = inputs_[var.parameter()];
    int argu_size = var.arguments_size();
    args.reserve(argu_size);
    for (int j = 0; j < argu_size; ++j) {
      args.push_back(var.arguments(j));
    }
  }
  // restore outputs_
  int output_size = desc_.outputs_size();
  for (int i = 0; i < output_size; ++i) {
    const proto::OpDesc::Var &var = desc_.outputs(i);
    std::vector<std::string> &args = outputs_[var.parameter()];
    int argu_size = var.arguments_size();
    args.reserve(argu_size);
    for (int j = 0; j < argu_size; ++j) {
      args.push_back(var.arguments(j));
    }
  }
  // restore attrs_
  for (const proto::OpDesc::Attr &attr : desc_.attrs()) {
    std::string attr_name = attr.name();
    // The sub_block referred to by the BLOCK attr hasn't been added
    // to ProgramDesc class yet, we skip setting BLOCK/BLOCKS attr here.
    if (attr.type() != proto::AttrType::BLOCK &&
        attr.type() != proto::AttrType::BLOCKS) {
      attrs_[attr_name] = GetAttrValue(attr);
    }
  }
  this->block_ = block;
}

proto::OpDesc *OpDesc::Proto() {
  Flush();
  return &desc_;
}

const std::vector<std::string> &OpDesc::Input(const std::string &name) const {
  auto it = inputs_.find(name);
  PADDLE_ENFORCE_NE(
      it, inputs_.end(),
      platform::errors::NotFound("Input %s cannot be found in operator %s.",
                                 name, Type()));
  return it->second;
}

std::vector<std::string> OpDesc::InputArgumentNames() const {
  std::vector<std::string> retv;
  for (auto &ipt : this->inputs_) {
    retv.insert(retv.end(), ipt.second.begin(), ipt.second.end());
  }
  return retv;
}

void OpDesc::SetInput(const std::string &param_name,
                      const std::vector<std::string> &args) {
  need_update_ = true;
  inputs_[param_name] = args;
}

const std::vector<std::string> &OpDesc::Output(const std::string &name) const {
  auto it = outputs_.find(name);
  PADDLE_ENFORCE_NE(
      it, outputs_.end(),
      platform::errors::NotFound("Output %s cannot be found in operator %s.",
                                 name, Type()));
  return it->second;
}

bool OpDesc::HasOutput(const std::string &name) const {
  return outputs_.find(name) != outputs_.end();
}

std::vector<std::string> OpDesc::OutputArgumentNames() const {
  std::vector<std::string> retv;
  for (auto &ipt : this->outputs_) {
    retv.insert(retv.end(), ipt.second.begin(), ipt.second.end());
  }
  return retv;
}

void OpDesc::SetOutput(const std::string &param_name,
                       const std::vector<std::string> &args) {
  need_update_ = true;
  this->outputs_[param_name] = args;
}

void OpDesc::RemoveOutput(const std::string &name) {
  outputs_.erase(name);
  need_update_ = true;
}

void OpDesc::RemoveInput(const std::string &name) {
  inputs_.erase(name);
  need_update_ = true;
}

bool OpDesc::HasProtoAttr(const std::string &name) const {
  auto &op_info = OpInfoMap::Instance();
  if (op_info.Has(desc_.type())) {
    auto op_info_ptr = op_info.Get(desc_.type());
    if (op_info_ptr.HasOpProtoAndChecker()) {
      const proto::OpProto &proto = op_info_ptr.Proto();
      for (int i = 0; i != proto.attrs_size(); ++i) {
        const proto::OpProto::Attr &attr = proto.attrs(i);
        if (attr.name() == name) {
          return true;
        }
      }
    }
  }
  return false;
}

proto::AttrType OpDesc::GetAttrType(const std::string &name) const {
  auto it = attrs_.find(name);
  PADDLE_ENFORCE_NE(it, attrs_.end(), platform::errors::NotFound(
                                          "Attribute %s is not found.", name));
  return static_cast<proto::AttrType>(it->second.which() - 1);
}

std::vector<std::string> OpDesc::AttrNames() const {
  std::vector<std::string> retv;
  retv.reserve(attrs_.size());
  for (auto &attr : attrs_) {
    retv.push_back(attr.first);
  }
  return retv;
}

void OpDesc::RemoveAttr(const std::string &name) {
  attrs_.erase(name);
  need_update_ = true;
}

void OpDesc::SetAttr(const std::string &name, const Attribute &v) {
  // NOTICE(minqiyang): pybind11 maps an empty Python list to std::vector<int>
  // in C++, so the attribute's type has to be corrected here when that
  // happens.
  proto::AttrType attr_type = static_cast<proto::AttrType>(v.which() - 1);
  if (attr_type == proto::AttrType::INTS &&
      BOOST_GET_CONST(std::vector<int>, v).size() == 0u) {
    // Find current attr via attr name and set the correct attribute value
    const proto::OpProto::Attr &attr = GetProtoAttr(name);
    switch (attr.type()) {
      case proto::AttrType::BOOLEANS: {
        VLOG(11) << "SetAttr: " << Type() << ", " << name
                 << " from INTS to BOOLEANS";
        this->attrs_[name] = std::vector<bool>();
        break;
      }
      case proto::AttrType::INTS: {
        VLOG(11) << "SetAttr: " << Type() << ", " << name
                 << " from INTS to INTS";
        this->attrs_[name] = std::vector<int>();
        break;
      }
      case proto::AttrType::LONGS: {
        VLOG(11) << "SetAttr: " << Type() << ", " << name
                 << " from LONGS to LONGS";
        this->attrs_[name] = std::vector<int64_t>();
        break;
      }
      case proto::AttrType::FLOATS: {
        VLOG(11) << "SetAttr: " << Type() << ", " << name
                 << " from INTS to FLOATS";
        this->attrs_[name] = std::vector<float>();
        break;
      }
      case proto::AttrType::STRINGS: {
        VLOG(11) << "SetAttr: " << Type() << ", " << name
                 << " from INTS to STRINGS";
        this->attrs_[name] = std::vector<std::string>();
        break;
      }
      case proto::AttrType::BLOCKS: {
        VLOG(11) << "SetAttr: " << Type() << ", " << name
                 << " from INTS to BLOCKS";
        this->SetBlocksAttr(name, std::vector<BlockDesc *>());
        return;
      }
      default:
        PADDLE_THROW(platform::errors::Unimplemented(
            "Unsupported attribute type (code %d).", attr.type()));
    }
    need_update_ = true;
    return;
  }

  // In order to set bool attr properly
  if (attr_type == proto::AttrType::INT && HasProtoAttr(name) &&
      GetProtoAttr(name).type() == proto::AttrType::BOOLEAN) {
    this->attrs_[name] = static_cast<bool>(BOOST_GET_CONST(int, v));
    need_update_ = true;
    return;
  }

  this->attrs_[name] = v;
  need_update_ = true;
}

void OpDesc::SetBlockAttr(const std::string &name, BlockDesc *block) {
  this->attrs_[name] = block;
  need_update_ = true;
}

void OpDesc::SetBlocksAttr(const std::string &name,
                           std::vector<BlockDesc *> blocks) {
  this->attrs_[name] = blocks;
  need_update_ = true;
}

void OpDesc::SetAttrMap(
    const std::unordered_map<std::string, Attribute> &attr_map) {
  attrs_ = attr_map;
  need_update_ = true;
}

Attribute OpDesc::GetAttr(const std::string &name) const {
  auto it = attrs_.find(name);
  PADDLE_ENFORCE_NE(it, attrs_.end(), platform::errors::NotFound(
                                          "Attribute %s is not found.", name));
  return it->second;
}

const proto::OpProto::Attr &OpDesc::GetProtoAttr(
    const std::string &name) const {
  const proto::OpProto &proto = OpInfoMap::Instance().Get(Type()).Proto();
  for (int i = 0; i != proto.attrs_size(); ++i) {
    const proto::OpProto::Attr &attr = proto.attrs(i);
    if (attr.name() == name) {
      return attr;
    }
  }

  PADDLE_THROW(platform::errors::NotFound(
      "Attribute %s is not found in proto %s.", name, proto.type()));
}

Attribute OpDesc::GetNullableAttr(const std::string &name) const {
  auto it = attrs_.find(name);
  if (it != attrs_.end()) {
    return it->second;
  } else {
    return Attribute();
  }
}

std::vector<int> OpDesc::GetBlocksAttrIds(const std::string &name) const {
  auto it = attrs_.find(name);
  PADDLE_ENFORCE_NE(
      it, attrs_.end(),
      platform::errors::NotFound(
          "Attribute `%s` is not found in operator `%s`.", name, desc_.type()));
  auto blocks = BOOST_GET_CONST(std::vector<BlockDesc *>, it->second);

  std::vector<int> ids;
  for (auto n : blocks) {
    ids.push_back(n->ID());
  }

  return ids;
}

int OpDesc::GetBlockAttrId(const std::string &name) const {
  auto it = attrs_.find(name);
  PADDLE_ENFORCE_NE(
      it, attrs_.end(),
      platform::errors::NotFound(
          "Attribute `%s` is not found in operator `%s`.", name, desc_.type()));
  return BOOST_GET_CONST(BlockDesc *, it->second)->ID();
}

const std::unordered_map<std::string, Attribute> &OpDesc::GetAttrMap() const {
  return attrs_;
}

void OpDesc::Rename(const std::string &old_name, const std::string &new_name) {
  RenameInput(old_name, new_name);
  RenameOutput(old_name, new_name);
  need_update_ = true;
}

void OpDesc::RenameOutput(const std::string &old_name,
                          const std::string &new_name) {
  for (auto &output : outputs_) {
    std::replace(output.second.begin(), output.second.end(), old_name,
                 new_name);
  }

  auto it = attrs_.find(framework::OpProtoAndCheckerMaker::OpRoleVarAttrName());
  if (it != attrs_.end()) {
    auto &op_vars = BOOST_GET(std::vector<std::string>, it->second);
    std::replace(op_vars.begin(), op_vars.end(), old_name, new_name);
  }

  need_update_ = true;
}

void OpDesc::RenameInput(const std::string &old_name,
                         const std::string &new_name) {
  for (auto &input : inputs_) {
    std::replace(input.second.begin(), input.second.end(), old_name, new_name);
  }

  auto it = attrs_.find(framework::OpProtoAndCheckerMaker::OpRoleVarAttrName());
  if (it != attrs_.end()) {
    auto &op_vars = BOOST_GET(std::vector<std::string>, it->second);
    std::replace(op_vars.begin(), op_vars.end(), old_name, new_name);
  }

  need_update_ = true;
}

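// Serializes each Attribute variant alternative into the matching field of a
// proto::OpDesc::Attr message.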
struct SetAttrDescVisitor : public boost::static_visitor<void> {
  explicit SetAttrDescVisitor(proto::OpDesc::Attr *attr) : attr_(attr) {}
  mutable proto::OpDesc::Attr *attr_;
  void operator()(int v) const { attr_->set_i(v); }
  void operator()(float v) const { attr_->set_f(v); }
  void operator()(const std::string &v) const { attr_->set_s(v); }

  // Please refer to https://github.com/PaddlePaddle/Paddle/issues/7162
  template <class T,
            class = typename std::enable_if<std::is_same<bool, T>::value>::type>
  void operator()(T b) const {
    attr_->set_b(b);
  }

  void operator()(const std::vector<int> &v) const {
    VectorToRepeated(v, attr_->mutable_ints());
  }
  void operator()(const std::vector<float> &v) const {
    VectorToRepeated(v, attr_->mutable_floats());
  }
  void operator()(const std::vector<std::string> &v) const {
    VectorToRepeated(v, attr_->mutable_strings());
  }
  void operator()(const std::vector<bool> &v) const {
    VectorToRepeated(v, attr_->mutable_bools());
  }
  void operator()(const std::vector<BlockDesc *> &v) const {
    std::vector<int> blocks_idx;
    for (auto blk : v) {
      blocks_idx.push_back(blk->ID());
    }
    VectorToRepeated(blocks_idx, attr_->mutable_blocks_idx());
  }

  void operator()(BlockDesc *desc) const { attr_->set_block_idx(desc->ID()); }

  void operator()(int64_t v) const { attr_->set_l(v); }

  void operator()(const std::vector<int64_t> &v) const {
    VectorToRepeated(v, attr_->mutable_longs());
  }

  void operator()(const std::vector<double> &v) const {
    VectorToRepeated(v, attr_->mutable_float64s());
  }

  void operator()(boost::blank) const {
    PADDLE_THROW(platform::errors::Unavailable(
        "Unsupported calling method of SetAttrDescVisitor object for "
        "`boost::blank` type."));
  }
};

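// Synchronize the cached inputs_/outputs_/attrs_ maps back into the underlying
// protobuf message if they have been modified.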
void OpDesc::Flush() {
  if (need_update_) {
    this->desc_.mutable_inputs()->Clear();
    for (auto &ipt : inputs_) {
      auto *input = desc_.add_inputs();
      input->set_parameter(ipt.first);
      VectorToRepeated(ipt.second, input->mutable_arguments());
    }

    this->desc_.mutable_outputs()->Clear();
    for (auto &opt : outputs_) {
      auto *output = desc_.add_outputs();
      output->set_parameter(opt.first);
      VectorToRepeated(opt.second, output->mutable_arguments());
    }

    this->desc_.mutable_attrs()->Clear();
    for (auto &attr : attrs_) {
      auto *attr_desc = desc_.add_attrs();
      attr_desc->set_name(attr.first);
      attr_desc->set_type(
          static_cast<proto::AttrType>(attr.second.which() - 1));
      SetAttrDescVisitor visitor(attr_desc);
      boost::apply_visitor(visitor, attr.second);
    }

    need_update_ = false;
  }
}

void OpDesc::CheckAttrs() {
  PADDLE_ENFORCE_EQ(Type().empty(), false,
                    platform::errors::PreconditionNotMet(
                        "CheckAttrs() can not be called before type is set."));
  auto *checker = OpInfoMap::Instance().Get(Type()).Checker();
  if (checker == nullptr) {
    // checker is not configured. That operator could be generated by Paddle,
    // not by users.
    return;
  }
  VLOG(10) << "begin to check attribute of " << Type();
  checker->Check(&attrs_);
}

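// Run the operator's registered compile-time InferShape function against the
// variables in the given block.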
void OpDesc::InferShape(const BlockDesc &block) {
  try {
    VLOG(3) << "CompileTime infer shape on " << Type();
    auto &op_info = OpInfoMap::Instance().Get(this->Type());
    auto *checker = op_info.Checker();
    if (checker != nullptr) {
      // set default attribute values here
      VLOG(10) << "begin to check attribute of " << Type();
      checker->Check(&attrs_);
    }
    auto &infer_shape = op_info.infer_shape_;
    PADDLE_ENFORCE_EQ(
        static_cast<bool>(infer_shape), true,
        platform::errors::NotFound(
            "Operator %s's infer_shape is not registered.", this->Type()));
    CompileTimeInferShapeContext ctx(*this, block);
    if (VLOG_IS_ON(10)) {
      std::ostringstream sout;
      auto inames = this->InputArgumentNames();
      sout << " From [";
      std::copy(inames.begin(), inames.end(),
                std::ostream_iterator<std::string>(sout, ", "));
      sout << "] to [";
      auto onames = this->OutputArgumentNames();
      std::copy(onames.begin(), onames.end(),
                std::ostream_iterator<std::string>(sout, ", "));
      sout << "]";
      VLOG(10) << sout.str();
    }
    infer_shape(&ctx);
  } catch (platform::EnforceNotMet &exception) {
    framework::AppendErrorOpHint(Type(), &exception);
    throw std::move(exception);
  } catch (...) {
    std::rethrow_exception(std::current_exception());
  }
}

void OpDesc::InferVarType(BlockDesc *block) const {
  // There are a few places where the var type can be set.
  // When a VarDesc is created, it defaults to LOD_TENSOR.
  // When an output variable is created, it is set to LOD_TENSOR by default.
  // We limit here to be the only place that operator defines its customized
  // var type inference. Hence, we don't do any "default" setting here.
  auto &info = OpInfoMap::Instance().Get(this->Type());
  if (info.infer_var_type_) {
    InferVarTypeContext context(this, block);
    info.infer_var_type_(&context);
  }
}

CompileTimeInferShapeContext::CompileTimeInferShapeContext(
    const OpDesc &op, const BlockDesc &block)
    : op_(op), block_(block) {}

bool CompileTimeInferShapeContext::HasInput(const std::string &name) const {
  if (op_.Inputs().find(name) == op_.Inputs().end()) {
    return false;
  }
  const std::vector<std::string> &input_names = op_.Input(name);
  auto length = input_names.size();
  if (length == 0) {
    return false;
  }
  PADDLE_ENFORCE_EQ(length, 1UL, platform::errors::InvalidArgument(
                                     "Input(%s) should have only one value, "
                                     "but it has %d values now.",
                                     name, length));
  return block_.HasVarRecursive(input_names[0]);
}

bool CompileTimeInferShapeContext::HasOutput(const std::string &name) const {
  if (op_.Outputs().find(name) == op_.Outputs().end()) {
    return false;
  }
  const std::vector<std::string> &output_names = op_.Output(name);
  auto length = output_names.size();
  if (length == 0) {
    return false;
  }
  PADDLE_ENFORCE_EQ(length, 1UL, platform::errors::InvalidArgument(
                                     "Output(%s) should have only one value, "
                                     "but it has %d values now.",
                                     name, length));
  return block_.HasVarRecursive(output_names[0]);
}

bool CompileTimeInferShapeContext::HasAttr(const std::string &name) const {
  return op_.HasAttr(name);
}

bool CompileTimeInferShapeContext::HasInputs(const std::string &name) const {
  if (op_.Inputs().find(name) == op_.Inputs().end()) {
    return false;
  }
  const std::vector<std::string> &input_names = op_.Input(name);
  if (input_names.empty()) {
    return false;
  }
  for (auto &input : input_names) {
    if (!block_.HasVarRecursive(input)) return false;
  }
  return true;
}

bool CompileTimeInferShapeContext::HasOutputs(const std::string &name,
                                              bool allow_null) const {
  if (op_.Outputs().find(name) == op_.Outputs().end()) {
    return false;
  }
  const std::vector<std::string> &output_names = op_.Output(name);
  if (output_names.empty()) {
    return false;
  }
  if (allow_null) {
    for (auto &output : output_names) {
      if (block_.HasVarRecursive(output)) return true;
    }
    return false;
  } else {
    for (auto &output : output_names) {
      if (!block_.HasVarRecursive(output)) return false;
    }
    return true;
917 918 919 920 921 922 923
  }
}

AttrReader CompileTimeInferShapeContext::Attrs() const {
  return AttrReader(op_.GetAttrMap());
}

std::vector<std::string> CompileTimeInferShapeContext::Inputs(
    const std::string &name) const {
  return op_.Input(name);
}

std::vector<std::string> CompileTimeInferShapeContext::Outputs(
    const std::string &name) const {
  return op_.Output(name);
}

std::vector<DDim> CompileTimeInferShapeContext::GetRepeatedDims(
    const std::string &name) const {
  auto var = block_.FindVarRecursive(name);
  PADDLE_ENFORCE_NOT_NULL(
      var, platform::errors::NotFound("Variable %s is not found.", name));
  std::vector<DDim> res;
  try {
    auto shapes = var->GetShapes();
    for (const auto &s : shapes) {
      res.push_back(s.empty() ? phi::make_ddim({0UL}) : phi::make_ddim(s));
    }
  } catch (...) {
    VLOG(5) << "GetRepeatedDim of variable " << name << " error.";
    std::rethrow_exception(std::current_exception());
  }
  return res;
}

void CompileTimeInferShapeContext::SetDim(const std::string &name,
                                          const DDim &dim) {
  block_.FindVarRecursive(name)->SetShape(vectorize(dim));
}

void CompileTimeInferShapeContext::SetRepeatedDims(
    const std::string &name, const std::vector<DDim> &dims) {
  auto var = block_.FindVarRecursive(name);
  PADDLE_ENFORCE_NOT_NULL(
      var, platform::errors::NotFound("Variable %s is not found.", name));
  std::vector<std::vector<int64_t>> dim_vec(dims.size());
  std::transform(dims.begin(), dims.end(), dim_vec.begin(), phi::vectorize<>);
  var->SetShapes(dim_vec);
}

bool CompileTimeInferShapeContext::IsRuntime() const { return false; }

bool CompileTimeInferShapeContext::IsRunMKLDNNKernel() const { return false; }

proto::VarType::Type CompileTimeInferShapeContext::GetVarType(
    const std::string &name) const {
  return block_.FindVarRecursive(name)->GetType();
}

}  // namespace framework
}  // namespace paddle