/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/framework/op_desc.h"

#include <string>

#include "glog/logging.h"
#include "paddle/fluid/framework/block_desc.h"
#include "paddle/fluid/framework/op_call_stack.h"
#include "paddle/fluid/framework/op_proto_maker.h"
#include "paddle/fluid/framework/operator.h"
#include "paddle/fluid/framework/shape_inference.h"
#include "paddle/fluid/framework/var_type_inference.h"

namespace paddle {
namespace framework {

class CompileTimeInferShapeContext : public InferShapeContext {
 public:
  CompileTimeInferShapeContext(const OpDesc &op, const BlockDesc &block);

  bool HasInput(const std::string &name) const override;

  bool HasOutput(const std::string &name) const override;

  bool HasAttr(const std::string &name) const override;

  bool HasInputs(const std::string &name) const override;

  bool HasOutputs(const std::string &name,
                  bool allow_null = false) const override;

  AttrReader Attrs() const override;

  std::vector<std::string> Inputs(const std::string &name) const override;

  std::vector<std::string> Outputs(const std::string &name) const override;

  std::string GetInputNameByIdx(size_t idx) const override {
    auto &op_proto =
        paddle::framework::OpInfoMap::Instance().Get(op_.Type()).proto_;
    PADDLE_ENFORCE_LT(idx, op_proto->inputs().size(),
                      platform::errors::OutOfRange(
                          "The index should be less than the size of inputs of "
                          "operator %s, but got index is %d and size is %d",
                          op_.Type(), idx, op_proto->inputs().size()));
    return op_proto->inputs()[idx].name();
  }

  std::string GetOutputNameByIdx(size_t idx) const override {
    auto &op_proto =
        paddle::framework::OpInfoMap::Instance().Get(op_.Type()).proto_;
    PADDLE_ENFORCE_LT(
        idx, op_proto->outputs().size(),
        platform::errors::OutOfRange(
            "The index should be less than the size of outputs of "
            "operator %s, but got index is %d and size is %d",
            op_.Type(), idx, op_proto->outputs().size()));
    return op_proto->outputs()[idx].name();
  }

  void ShareDim(const std::string &in, const std::string &out, size_t i = 0,
                size_t j = 0) override {
    PADDLE_ENFORCE_LT(i, Inputs(in).size(),
                      platform::errors::InvalidArgument(
                          "The input variable index is out of range, expected "
                          "index less than %d, but received index is %d.",
                          Inputs(in).size(), i));
    PADDLE_ENFORCE_LT(j, Outputs(out).size(),
                      platform::errors::InvalidArgument(
                          "The output variable index is out of range, expected "
                          "index less than %d, but received index is %d.",
                          Outputs(out).size(), j));

    std::string input_n = Inputs(in)[i];
    std::string output_n = Outputs(out)[j];

    PADDLE_ENFORCE_NE(input_n, framework::kEmptyVarName,
                      platform::errors::InvalidArgument(
                          "The input variable %s[%d] is empty.", in, i));
    PADDLE_ENFORCE_NE(output_n, framework::kEmptyVarName,
                      platform::errors::InvalidArgument(
                          "The output variable %s[%d] is empty.", out, j));

    auto *in_var = block_.FindVarRecursive(input_n);
    auto *out_var = block_.FindVarRecursive(output_n);

    PADDLE_ENFORCE_EQ(
        in_var->GetType(), out_var->GetType(),
        platform::errors::InvalidArgument(
            "The type of input %s and output %s do not match. The input type "
            "is %s, output type is %s.",
            input_n, output_n, DataTypeToString(in_var->GetType()),
            DataTypeToString(out_var->GetType())));

    SetDim(output_n, GetDim(input_n));
  }

  void ShareAllLoD(const std::string &in,
                   const std::string &out) const override {
    auto &in_var_names = op_.Input(in);
    auto &out_var_names = op_.Output(out);

    PADDLE_ENFORCE_EQ(
        in_var_names.size(), out_var_names.size(),
        platform::errors::PreconditionNotMet(
            "Op [%s]: Input var number should be equal to output var number",
            op_.Type()));

    for (size_t i = 0; i < in_var_names.size(); ++i) {
      if (out_var_names[i] == framework::kEmptyVarName) {
        continue;
      }

      auto *in_var = block_.FindVarRecursive(in_var_names[i]);
      auto *out_var = block_.FindVarRecursive(out_var_names[i]);
      if (in_var->GetType() != proto::VarType::LOD_TENSOR &&
          in_var->GetType() != proto::VarType::LOD_TENSOR_ARRAY) {
        VLOG(3) << "input " << in << " is not LoDTensor or LoDTensorArray.";
        return;
      }
      out_var->SetLoDLevel(in_var->GetLoDLevel());
    }
  }

  void ShareLoD(const std::string &in, const std::string &out, size_t i = 0,
                size_t j = 0) const override {
    PADDLE_ENFORCE_LT(i, Inputs(in).size(),
                      platform::errors::InvalidArgument(
                          "The input variable index is out of range, expected "
                          "index less than %d, but received index is %d.",
                          Inputs(in).size(), i));
    PADDLE_ENFORCE_LT(j, Outputs(out).size(),
                      platform::errors::InvalidArgument(
                          "The output variable index is out of range, expected "
                          "index less than %d, but received index is %d.",
                          Outputs(out).size(), j));
    PADDLE_ENFORCE_NE(Inputs(in)[i], framework::kEmptyVarName,
                      platform::errors::InvalidArgument(
                          "The input variable %s[%d] is empty.", in, i));
    PADDLE_ENFORCE_NE(Outputs(out)[j], framework::kEmptyVarName,
                      platform::errors::InvalidArgument(
                          "The output variable %s[%d] is empty.", out, j));
    auto *in_var = block_.FindVarRecursive(Inputs(in)[i]);
    auto *out_var = block_.FindVarRecursive(Outputs(out)[j]);
    if (in_var->GetType() != proto::VarType::LOD_TENSOR &&
        in_var->GetType() != proto::VarType::LOD_TENSOR_ARRAY) {
      VLOG(3) << "input " << in << " is not LoDTensor or LoDTensorArray.";
      return;
    }
    out_var->SetLoDLevel(in_var->GetLoDLevel());
  }

  int32_t GetLoDLevel(const std::string &in, size_t i = 0) const override {
    PADDLE_ENFORCE_LT(i, Inputs(in).size(),
                      platform::errors::InvalidArgument(
                          "The input variable index is out of range, input "
                          "variable %s of operator %s only has %d elements.",
                          in, op_.Type(), Inputs(in).size()));
    PADDLE_ENFORCE_NE(Inputs(in)[i], framework::kEmptyVarName,
                      platform::errors::InvalidArgument(
                          "The input variable %s[%d] of operator %s is empty.",
                          in, i, op_.Type()));
    auto *in_var = block_.FindVarRecursive(Inputs(in)[i]);
    PADDLE_ENFORCE_NOT_NULL(
        in_var, platform::errors::NotFound(
                    "The input variable %s[%d] of operator %s is not found.",
                    in, i, op_.Type()));
    return in_var->GetLoDLevel();
  }

  void SetLoDLevel(const std::string &out, int32_t lod_level,
                   size_t j = 0) const override {
    PADDLE_ENFORCE_LT(j, Outputs(out).size(),
                      platform::errors::InvalidArgument(
                          "The output variable index is out of range, output "
                          "variable %s of operator %s only has %d elements.",
                          out, op_.Type(), Outputs(out).size()));
    PADDLE_ENFORCE_NE(Outputs(out)[j], framework::kEmptyVarName,
                      platform::errors::InvalidArgument(
                          "The output variable %s[%d] of operator %s is empty.",
                          out, j, op_.Type()));
    auto *out_var = block_.FindVarRecursive(Outputs(out)[j]);
    PADDLE_ENFORCE_NOT_NULL(
        out_var, platform::errors::NotFound(
                     "The output variable %s[%d] of operator %s is not found.",
                     out, j, op_.Type()));
    if (lod_level >= 0) {
      out_var->SetLoDLevel(lod_level);
    }
  }

  paddle::small_vector<InferShapeVarPtr, phi::kInputSmallVectorSize>
  GetInputVarPtrs(const std::string &name) const override {
    const std::vector<std::string> arg_names = Inputs(name);
    paddle::small_vector<InferShapeVarPtr, phi::kInputSmallVectorSize> res;
    res.reserve(arg_names.size());
    std::transform(arg_names.begin(), arg_names.end(), std::back_inserter(res),
                   [this](const std::string &name) {
                     return block_.FindVarRecursive(name);
                   });
    return res;
  }

  paddle::small_vector<InferShapeVarPtr, phi::kOutputSmallVectorSize>
  GetOutputVarPtrs(const std::string &name) const override {
    const std::vector<std::string> arg_names = Outputs(name);
    paddle::small_vector<InferShapeVarPtr, phi::kOutputSmallVectorSize> res;
    res.reserve(arg_names.size());
    std::transform(arg_names.begin(), arg_names.end(), std::back_inserter(res),
                   [this](const std::string &name) {
                     return block_.FindVarRecursive(name);
                   });
    return res;
  }

  DDim GetInputDim(const std::string &name) const override {
    const std::vector<std::string> &arg_names = Inputs(name);
    PADDLE_ENFORCE_EQ(arg_names.size(), 1UL,
                      platform::errors::InvalidArgument(
                          "The input(%s) should hold only one element, but now "
                          "it holds %d elements.",
                          name, arg_names.size()));
    return this->GetDim(arg_names[0]);
  }

  std::vector<DDim> GetInputsDim(const std::string &name) const override {
    const std::vector<std::string> &arg_names = Inputs(name);
    return GetDims(arg_names);
  }

  bool IsRuntime() const override;

  bool IsRunMKLDNNKernel() const override;

  proto::VarType::Type GetInputVarType(const std::string &name) const override {
    return GetVarType(Inputs(name).at(0));
  }

  std::vector<proto::VarType::Type> GetInputsVarType(
      const std::string &name) const override {
    return GetVarTypes(Inputs(name));
  }

  std::vector<proto::VarType::Type> GetOutputsVarType(
      const std::string &name) const override {
    return GetVarTypes(Outputs(name));
  }

  void SetOutputDim(const std::string &name, const DDim &dim) override {
    auto arg_names = Outputs(name);
    PADDLE_ENFORCE_EQ(arg_names.size(), 1UL,
                      platform::errors::InvalidArgument(
                          "The iutput(%s) should hold only one element, but "
                          "now it holds %d elements.",
                          name, arg_names.size()));
    SetDim(arg_names[0], dim);
  }

  void SetOutputsDim(const std::string &name,
                     const std::vector<DDim> &dims) override {
    auto names = Outputs(name);
    SetDims(names, dims);
  }

  const phi::ArgumentMappingFn *GetPhiArgumentMappingFn() const override {
    return phi::OpUtilsMap::Instance().GetArgumentMappingFn(op_.Type());
  }

  const phi::KernelSignature *GetPhiDefaultKernelSignature() const override {
    return &phi::DefaultKernelSignatureMap::Instance().Get(op_.Type());
  }

 protected:
  std::vector<proto::VarType::Type> GetVarTypes(
      const std::vector<std::string> &names) const {
    std::vector<proto::VarType::Type> retv;
    retv.resize(names.size());
    std::transform(
        names.begin(), names.end(), retv.begin(),
        std::bind(std::mem_fn(&CompileTimeInferShapeContext::GetVarType), this,
                  std::placeholders::_1));
    return retv;
  }

  proto::VarType::Type GetVarType(const std::string &name) const;

  DDim GetDim(const std::string &name) const {
    auto var = block_.FindVarRecursive(name);
    PADDLE_ENFORCE_NOT_NULL(
        var, platform::errors::NotFound("Variable %s is not found.", name));
    DDim res;
    try {
      auto shape = var->GetShape();
      res = shape.empty() ? phi::make_ddim({0UL}) : phi::make_ddim(shape);
    } catch (...) {
      VLOG(5) << "GetDim of variable " << name << " error";
      std::rethrow_exception(std::current_exception());
    }
    return res;
  }

  std::vector<DDim> GetDims(const std::vector<std::string> &names) const {
    std::vector<DDim> ret;
    ret.reserve(names.size());
    std::transform(
        names.begin(), names.end(), std::back_inserter(ret),
        [this](const std::string &name) { return this->GetDim(name); });
    return ret;
  }

  void SetDim(const std::string &name, const DDim &dim);

  void SetDims(const std::vector<std::string> &names,
               const std::vector<DDim> &dims) {
    size_t length = names.size();
    PADDLE_ENFORCE_EQ(length, dims.size(),
                      platform::errors::InvalidArgument(
                          "The input variables number(%d) and input dimensions "
                          "number(%d) do not match.",
                          length, dims.size()));
    for (size_t i = 0; i < length; ++i) {
      if (names[i] == framework::kEmptyVarName) {
        continue;
      }
      SetDim(names[i], dims[i]);
    }
  }

  std::vector<DDim> GetRepeatedDims(const std::string &name) const override;

  void SetRepeatedDims(const std::string &name,
                       const std::vector<DDim> &dims) override;

  const OpDesc &op_;
  const BlockDesc &block_;
};

OpDesc::OpDesc(const std::string &type, const VariableNameMap &inputs,
               const VariableNameMap &outputs, const AttributeMap &attrs) {
  desc_.set_type(type);
  inputs_ = inputs;
  outputs_ = outputs;
  attrs_ = attrs;
  need_update_ = true;
  block_ = nullptr;
}

OpDesc::OpDesc(const OpDesc &other, BlockDesc *block) {
  CopyFrom(other);
  block_ = block;
  need_update_ = true;
}

void OpDesc::CopyFrom(const OpDesc &op_desc) {
  desc_.set_type(op_desc.Type());
  inputs_ = op_desc.inputs_;
  outputs_ = op_desc.outputs_;
  attrs_ = op_desc.attrs_;
  // The record of original_id_ is only for auto parallel.
  original_id_ = op_desc.original_id_;
  need_update_ = true;
}

OpDesc::OpDesc(const proto::OpDesc &desc, BlockDesc *block)
    : desc_(desc), need_update_(false) {
  // restore inputs_
  int input_size = desc_.inputs_size();
  for (int i = 0; i < input_size; ++i) {
    const proto::OpDesc::Var &var = desc_.inputs(i);
    std::vector<std::string> &args = inputs_[var.parameter()];
    int argu_size = var.arguments_size();
    args.reserve(argu_size);
    for (int j = 0; j < argu_size; ++j) {
      args.push_back(var.arguments(j));
    }
  }
  // restore outputs_
  int output_size = desc_.outputs_size();
  for (int i = 0; i < output_size; ++i) {
    const proto::OpDesc::Var &var = desc_.outputs(i);
    std::vector<std::string> &args = outputs_[var.parameter()];
    int argu_size = var.arguments_size();
    args.reserve(argu_size);
    for (int j = 0; j < argu_size; ++j) {
      args.push_back(var.arguments(j));
    }
  }
  // restore attrs_
  for (const proto::OpDesc::Attr &attr : desc_.attrs()) {
    std::string attr_name = attr.name();
    // The sub_block referred to by the BLOCK attr hasn't been added
    // to ProgramDesc class yet, we skip setting BLOCK/BLOCKS attr here.
    if (attr.type() != proto::AttrType::BLOCK &&
        attr.type() != proto::AttrType::BLOCKS) {
      attrs_[attr_name] = GetAttrValue(attr);
    }
  }
  this->block_ = block;
}

proto::OpDesc *OpDesc::Proto() {
  Flush();
  return &desc_;
}

const std::vector<std::string> &OpDesc::Input(const std::string &name) const {
  auto it = inputs_.find(name);
  PADDLE_ENFORCE_NE(
      it, inputs_.end(),
      platform::errors::NotFound("Input %s cannot be found in operator %s.",
                                 name, Type()));
  return it->second;
}

std::vector<std::string> OpDesc::InputArgumentNames() const {
  std::vector<std::string> retv;
  for (auto &ipt : this->inputs_) {
    retv.insert(retv.end(), ipt.second.begin(), ipt.second.end());
  }
  return retv;
}

void OpDesc::SetInput(const std::string &param_name,
                      const std::vector<std::string> &args) {
  need_update_ = true;
  inputs_[param_name] = args;
}

const std::vector<std::string> &OpDesc::Output(const std::string &name) const {
  auto it = outputs_.find(name);
  PADDLE_ENFORCE_NE(
      it, outputs_.end(),
      platform::errors::NotFound("Output %s cannot be found in operator %s.",
                                 name, Type()));
  return it->second;
}

bool OpDesc::HasOutput(const std::string &name) const {
  return outputs_.find(name) != outputs_.end();
}

std::vector<std::string> OpDesc::OutputArgumentNames() const {
  std::vector<std::string> retv;
  for (auto &ipt : this->outputs_) {
    retv.insert(retv.end(), ipt.second.begin(), ipt.second.end());
  }
  return retv;
}

void OpDesc::SetOutput(const std::string &param_name,
                       const std::vector<std::string> &args) {
  need_update_ = true;
  this->outputs_[param_name] = args;
}

void OpDesc::RemoveOutput(const std::string &name) {
  outputs_.erase(name);
  need_update_ = true;
}

void OpDesc::RemoveInput(const std::string &name) {
  inputs_.erase(name);
  need_update_ = true;
}

bool OpDesc::HasProtoAttr(const std::string &name) const {
  auto &op_info = OpInfoMap::Instance();
  if (op_info.Has(desc_.type())) {
    auto op_info_ptr = op_info.Get(desc_.type());
    if (op_info_ptr.HasOpProtoAndChecker()) {
      const proto::OpProto &proto = op_info_ptr.Proto();
      for (int i = 0; i != proto.attrs_size(); ++i) {
        const proto::OpProto::Attr &attr = proto.attrs(i);
        if (attr.name() == name) {
          return true;
        }
      }
    }
  }
  return false;
}

proto::AttrType OpDesc::GetAttrType(const std::string &name) const {
  auto it = attrs_.find(name);
  PADDLE_ENFORCE_NE(
      it, attrs_.end(),
      platform::errors::NotFound("Attribute %s is not found.", name));
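  // Note (illustrative): Attribute is a boost::variant whose first
  // alternative is boost::blank, so the stored index is one ahead of the
  // proto::AttrType enum value; subtracting 1 recovers the proto type, the
  // same convention used in Flush() below.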
  return static_cast<proto::AttrType>(it->second.which() - 1);
}

std::vector<std::string> OpDesc::AttrNames() const {
  std::vector<std::string> retv;
  retv.reserve(attrs_.size());
  for (auto &attr : attrs_) {
    retv.push_back(attr.first);
  }
  return retv;
}

void OpDesc::RemoveAttr(const std::string &name) {
  attrs_.erase(name);
  need_update_ = true;
}

void OpDesc::SetAttr(const std::string &name, const Attribute &v) {
  // NOTICE(minqiyang): pybind11 will take the empty list in python as
  // the std::vector<int> type in C++; so we have to change the attr's type
  // here if we meet this issue
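  // Illustrative (hypothetical) example: a Python caller passing an empty
  // list, e.g. op._set_attr("some_longs_attr", []), arrives here typed as an
  // empty INTS value even if the proto declares the attribute as LONGS,
  // FLOATS, STRINGS, BOOLEANS or BLOCKS; the branch below rewrites the value
  // using the type declared in the proto.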
  proto::AttrType attr_type = static_cast<proto::AttrType>(v.which() - 1);
  if (attr_type == proto::AttrType::INTS &&
      BOOST_GET_CONST(std::vector<int>, v).size() == 0u) {
    // Find current attr via attr name and set the correct attribute value
    const proto::OpProto::Attr &attr = GetProtoAttr(name);
    switch (attr.type()) {
      case proto::AttrType::BOOLEANS: {
        VLOG(11) << "SetAttr: " << Type() << ", " << name
                 << " from INTS to BOOLEANS";
        this->attrs_[name] = std::vector<bool>();
        break;
      }
      case proto::AttrType::INTS: {
        VLOG(11) << "SetAttr: " << Type() << ", " << name
                 << " from INTS to INTS";
        this->attrs_[name] = std::vector<int>();
        break;
      }
      case proto::AttrType::LONGS: {
        VLOG(11) << "SetAttr: " << Type() << ", " << name
                 << " from LONGS to LONGS";
        this->attrs_[name] = std::vector<int64_t>();
        break;
      }
      case proto::AttrType::FLOATS: {
        VLOG(11) << "SetAttr: " << Type() << ", " << name
                 << " from INTS to FLOATS";
        this->attrs_[name] = std::vector<float>();
        break;
      }
      case proto::AttrType::STRINGS: {
        VLOG(11) << "SetAttr: " << Type() << ", " << name
                 << " from INTS to STRINGS";
        this->attrs_[name] = std::vector<std::string>();
        break;
      }
      case proto::AttrType::BLOCKS: {
        VLOG(11) << "SetAttr: " << Type() << ", " << name
                 << " from INTS to BLOCKS";
        this->SetBlocksAttr(name, std::vector<BlockDesc *>());
        return;
      }
      default:
        PADDLE_THROW(platform::errors::Unimplemented(
            "Unsupported attribute type (code %d).", attr.type()));
    }
    need_update_ = true;
    return;
  }

  // In order to set bool attr properly
  if (attr_type == proto::AttrType::INT && HasProtoAttr(name) &&
      GetProtoAttr(name).type() == proto::AttrType::BOOLEAN) {
    this->attrs_[name] = static_cast<bool>(BOOST_GET_CONST(int, v));
    need_update_ = true;
    return;
  }

  this->attrs_[name] = v;
  need_update_ = true;
}

void OpDesc::SetBlockAttr(const std::string &name, BlockDesc *block) {
  this->attrs_[name] = block;
  need_update_ = true;
}

void OpDesc::SetBlocksAttr(const std::string &name,
                           std::vector<BlockDesc *> blocks) {
  this->attrs_[name] = blocks;
  need_update_ = true;
}

void OpDesc::SetAttrMap(
    const std::unordered_map<std::string, Attribute> &attr_map) {
  attrs_ = attr_map;
  need_update_ = true;
}

Attribute OpDesc::GetAttr(const std::string &name) const {
  auto it = attrs_.find(name);
  PADDLE_ENFORCE_NE(
      it, attrs_.end(),
      platform::errors::NotFound("Attribute %s is not found.", name));
  return it->second;
}

const proto::OpProto::Attr &OpDesc::GetProtoAttr(
    const std::string &name) const {
  const proto::OpProto &proto = OpInfoMap::Instance().Get(Type()).Proto();
  for (int i = 0; i != proto.attrs_size(); ++i) {
    const proto::OpProto::Attr &attr = proto.attrs(i);
    if (attr.name() == name) {
      return attr;
    }
  }

  PADDLE_THROW(platform::errors::NotFound(
      "Attribute %s is not found in proto %s.", name, proto.type()));
}

Attribute OpDesc::GetNullableAttr(const std::string &name) const {
  auto it = attrs_.find(name);
  if (it != attrs_.end()) {
    return it->second;
  } else {
    return Attribute();
  }
}

std::vector<int> OpDesc::GetBlocksAttrIds(const std::string &name) const {
  auto it = attrs_.find(name);
  PADDLE_ENFORCE_NE(
      it, attrs_.end(),
      platform::errors::NotFound(
          "Attribute `%s` is not found in operator `%s`.", name, desc_.type()));
  auto blocks = BOOST_GET_CONST(std::vector<BlockDesc *>, it->second);

  std::vector<int> ids;
  for (auto n : blocks) {
    ids.push_back(n->ID());
  }

  return ids;
}

int OpDesc::GetBlockAttrId(const std::string &name) const {
  auto it = attrs_.find(name);
  PADDLE_ENFORCE_NE(
      it, attrs_.end(),
      platform::errors::NotFound(
          "Attribute `%s` is not found in operator `%s`.", name, desc_.type()));
  return BOOST_GET_CONST(BlockDesc *, it->second)->ID();
}

Y
F
fengjiayi 已提交
658 659 660
  return attrs_;
}

Y
Y
  RenameOutput(old_name, new_name);
F
fengjiayi 已提交
664 665 666
  need_update_ = true;
}

Y
                          const std::string &new_name) {
Y
    std::replace(output.second.begin(), output.second.end(), old_name,
                 new_name);
  }

  auto it = attrs_.find(framework::OpProtoAndCheckerMaker::OpRoleVarAttrName());
  if (it != attrs_.end()) {
    auto &op_vars = BOOST_GET(std::vector<std::string>, it->second);
    std::replace(op_vars.begin(), op_vars.end(), old_name, new_name);
  }

  need_update_ = true;
}

void OpDesc::RenameInput(const std::string &old_name,
                         const std::string &new_name) {
  for (auto &input : inputs_) {
    std::replace(input.second.begin(), input.second.end(), old_name, new_name);
  }

  auto it = attrs_.find(framework::OpProtoAndCheckerMaker::OpRoleVarAttrName());
  if (it != attrs_.end()) {
    auto &op_vars = BOOST_GET(std::vector<std::string>, it->second);
    std::replace(op_vars.begin(), op_vars.end(), old_name, new_name);
  }

  need_update_ = true;
}

struct SetAttrDescVisitor : public boost::static_visitor<void> {
  explicit SetAttrDescVisitor(proto::OpDesc::Attr *attr) : attr_(attr) {}
  mutable proto::OpDesc::Attr *attr_;
  void operator()(int v) const { attr_->set_i(v); }
  void operator()(float v) const { attr_->set_f(v); }
  void operator()(const std::string &v) const { attr_->set_s(v); }

  // Please refer to https://github.com/PaddlePaddle/Paddle/issues/7162
  template <class T,
            class = typename std::enable_if<std::is_same<bool, T>::value>::type>
  void operator()(T b) const {
    attr_->set_b(b);
  }
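  // The enable_if above restricts this overload to exactly bool; a plain
  // operator()(bool) could also be selected through implicit conversions
  // from the variant's other alternatives (see the issue referenced above),
  // which is what the template guard is assumed to prevent here.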

  void operator()(const std::vector<int> &v) const {
    VectorToRepeated(v, attr_->mutable_ints());
  }
  void operator()(const std::vector<float> &v) const {
    VectorToRepeated(v, attr_->mutable_floats());
  }
  void operator()(const std::vector<std::string> &v) const {
    VectorToRepeated(v, attr_->mutable_strings());
  }
  void operator()(const std::vector<bool> &v) const {
    VectorToRepeated(v, attr_->mutable_bools());
  }
  void operator()(const std::vector<BlockDesc *> &v) const {
    std::vector<int> blocks_idx;
    for (auto blk : v) {
      blocks_idx.push_back(blk->ID());
    }
    VectorToRepeated(blocks_idx, attr_->mutable_blocks_idx());
  }

  void operator()(BlockDesc *desc) const { attr_->set_block_idx(desc->ID()); }

  void operator()(int64_t v) const { attr_->set_l(v); }

  void operator()(const std::vector<int64_t> &v) const {
    VectorToRepeated(v, attr_->mutable_longs());
  }

  void operator()(const std::vector<double> &v) const {
    VectorToRepeated(v, attr_->mutable_float64s());
  }

  void operator()(boost::blank) const {
    PADDLE_THROW(platform::errors::Unavailable(
        "Unsupported calling method of SetAttrDescVisitor object for "
        "`boost::blank` type."));
  }
};

void OpDesc::Flush() {
  if (need_update_) {
    this->desc_.mutable_inputs()->Clear();
    for (auto &ipt : inputs_) {
      auto *input = desc_.add_inputs();
      input->set_parameter(ipt.first);
      VectorToRepeated(ipt.second, input->mutable_arguments());
    }

    this->desc_.mutable_outputs()->Clear();
    for (auto &opt : outputs_) {
      auto *output = desc_.add_outputs();
      output->set_parameter(opt.first);
      VectorToRepeated(opt.second, output->mutable_arguments());
    }

    this->desc_.mutable_attrs()->Clear();
    for (auto &attr : attrs_) {
      auto *attr_desc = desc_.add_attrs();
      attr_desc->set_name(attr.first);
      attr_desc->set_type(
          static_cast<proto::AttrType>(attr.second.which() - 1));
      SetAttrDescVisitor visitor(attr_desc);
      boost::apply_visitor(visitor, attr.second);
    }

    need_update_ = false;
  }
}

void OpDesc::CheckAttrs() {
  PADDLE_ENFORCE_EQ(Type().empty(), false,
                    platform::errors::PreconditionNotMet(
                        "CheckAttrs() can not be called before type is set."));
  auto *checker = OpInfoMap::Instance().Get(Type()).Checker();
  if (checker == nullptr) {
    // checker is not configured. That operator could be generated by Paddle,
    // not by users.
    return;
  }
  VLOG(10) << "begin to check attribute of " << Type();
  checker->Check(&attrs_);
}

void OpDesc::InferShape(const BlockDesc &block) {
  try {
    VLOG(3) << "CompileTime infer shape on " << Type();
    auto &op_info = OpInfoMap::Instance().Get(this->Type());
    auto *checker = op_info.Checker();
    if (checker != nullptr) {
      // set default value here
      VLOG(10) << "begin to check attribute of " << Type();
      checker->Check(&attrs_);
    }
    auto &infer_shape = op_info.infer_shape_;
    PADDLE_ENFORCE_EQ(
        static_cast<bool>(infer_shape), true,
        platform::errors::NotFound(
            "Operator %s's infer_shape is not registered.", this->Type()));
    CompileTimeInferShapeContext ctx(*this, block);
    if (VLOG_IS_ON(10)) {
      std::ostringstream sout;
      auto inames = this->InputArgumentNames();
      sout << " From [";
      std::copy(inames.begin(), inames.end(),
                std::ostream_iterator<std::string>(sout, ", "));
      sout << "] to [";
      auto onames = this->OutputArgumentNames();
      std::copy(onames.begin(), onames.end(),
                std::ostream_iterator<std::string>(sout, ", "));
      sout << "]";
      VLOG(10) << sout.str();
    }
    infer_shape(&ctx);
  } catch (platform::EnforceNotMet &exception) {
    framework::AppendErrorOpHint(Type(), &exception);
    throw std::move(exception);
  } catch (...) {
    std::rethrow_exception(std::current_exception());
  }
}

void OpDesc::InferVarType(BlockDesc *block) const {
  // There are a few places that var type can be set.
  // When VarDesc is created, default set to LOD_TENSOR.
  // When an output variable is created, it is by default set to LOD_TENSOR.
  // We limit here to be the only place that operator defines its customized
  // var type inference. Hence, we don't do any "default" setting here.
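  // Hypothetical example: an operator whose output should be SELECTED_ROWS
  // rather than the default LOD_TENSOR would register an infer_var_type_
  // handler that retypes its output; operators that register nothing keep
  // whatever type the VarDesc already carries.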
  auto &info = OpInfoMap::Instance().Get(this->Type());
  if (info.infer_var_type_) {
    InferVarTypeContext context(this, block);
    info.infer_var_type_(&context);
  }
}

CompileTimeInferShapeContext::CompileTimeInferShapeContext(
    const OpDesc &op, const BlockDesc &block)
    : op_(op), block_(block) {}

bool CompileTimeInferShapeContext::HasInput(const std::string &name) const {
  if (op_.Inputs().find(name) == op_.Inputs().end()) {
    return false;
  }
  const std::vector<std::string> &input_names = op_.Input(name);
  auto length = input_names.size();
  if (length == 0) {
    return false;
  }
  PADDLE_ENFORCE_EQ(
      length, 1UL,
      platform::errors::InvalidArgument("Input(%s) should have only one value, "
                                        "but it has %d values now.",
                                        name, length));
  return block_.HasVarRecursive(input_names[0]);
}

bool CompileTimeInferShapeContext::HasOutput(const std::string &name) const {
  if (op_.Outputs().find(name) == op_.Outputs().end()) {
    return false;
  }
  const std::vector<std::string> &output_names = op_.Output(name);
  auto length = output_names.size();
  if (length == 0) {
    return false;
  }
  PADDLE_ENFORCE_EQ(length, 1UL,
                    platform::errors::InvalidArgument(
                        "Output(%s) should have only one value, "
                        "but it has %d values now.",
                        name, length));
  return block_.HasVarRecursive(output_names[0]);
}

bool CompileTimeInferShapeContext::HasAttr(const std::string &name) const {
  return op_.HasAttr(name);
}

bool CompileTimeInferShapeContext::HasInputs(const std::string &name) const {
  if (op_.Inputs().find(name) == op_.Inputs().end()) {
    return false;
  }
  const std::vector<std::string> &input_names = op_.Input(name);
  if (input_names.empty()) {
    return false;
  }
  for (auto &input : input_names) {
    if (!block_.HasVarRecursive(input)) return false;
  }
  return true;
}

bool CompileTimeInferShapeContext::HasOutputs(const std::string &name,
                                              bool allow_null) const {
  if (op_.Outputs().find(name) == op_.Outputs().end()) {
    return false;
  }
  const std::vector<std::string> &output_names = op_.Output(name);
  if (output_names.empty()) {
    return false;
  }
  if (allow_null) {
    for (auto &output : output_names) {
      if (block_.HasVarRecursive(output)) return true;
    }
    return false;
  } else {
    for (auto &output : output_names) {
      if (!block_.HasVarRecursive(output)) return false;
    }
    return true;
921 922 923 924 925 926 927
  }
}

AttrReader CompileTimeInferShapeContext::Attrs() const {
  return AttrReader(op_.GetAttrMap());
}

std::vector<std::string> CompileTimeInferShapeContext::Inputs(
    const std::string &name) const {
  return op_.Input(name);
}

std::vector<std::string> CompileTimeInferShapeContext::Outputs(
    const std::string &name) const {
  return op_.Output(name);
}

std::vector<DDim> CompileTimeInferShapeContext::GetRepeatedDims(
    const std::string &name) const {
  auto var = block_.FindVarRecursive(name);
  PADDLE_ENFORCE_NOT_NULL(
      var, platform::errors::NotFound("Variable %s is not found.", name));
  std::vector<DDim> res;
  try {
    auto shapes = var->GetShapes();
    for (const auto &s : shapes) {
      res.push_back(s.empty() ? phi::make_ddim({0UL}) : phi::make_ddim(s));
    }
  } catch (...) {
    VLOG(5) << "GetRepeatedDim of variable " << name << " error.";
    std::rethrow_exception(std::current_exception());
  }
  return res;
}

void CompileTimeInferShapeContext::SetDim(const std::string &name,
                                          const DDim &dim) {
  block_.FindVarRecursive(name)->SetShape(vectorize(dim));
}

void CompileTimeInferShapeContext::SetRepeatedDims(
    const std::string &name, const std::vector<DDim> &dims) {
  auto var = block_.FindVarRecursive(name);
  PADDLE_ENFORCE_NOT_NULL(
      var, platform::errors::NotFound("Variable %s is not found.", name));
  std::vector<std::vector<int64_t>> dim_vec(dims.size());
  std::transform(dims.begin(), dims.end(), dim_vec.begin(), phi::vectorize<>);
  var->SetShapes(dim_vec);
}

bool CompileTimeInferShapeContext::IsRuntime() const { return false; }

bool CompileTimeInferShapeContext::IsRunMKLDNNKernel() const { return false; }

proto::VarType::Type CompileTimeInferShapeContext::GetVarType(
    const std::string &name) const {
  return block_.FindVarRecursive(name)->GetType();
}

}  // namespace framework
}  // namespace paddle